Fixed AttributeError: 'NoneType' object has no attribute 'is_scene' in scene_numbering code.

Show root dirs can now be set from the general config menu.
Mass editing of shows now has the ability to delete root dirs as well as edit them.
Daily search is no longer restricted to just 1 week of results, which now allows replacing lower-quality downloads with higher-quality ones when available.
RSS Cache is updated for each provider on demand now when performing manual, failed, backlog, or daily searches.
This commit is contained in:
echel0n 2014-08-30 01:47:00 -07:00
parent 8e37afbcc5
commit ee458bd211
33 changed files with 181 additions and 189 deletions

View file

@ -26,6 +26,7 @@
#end if #end if
<script type="text/javascript" src="$sbRoot/js/config.js?$sbPID"></script> <script type="text/javascript" src="$sbRoot/js/config.js?$sbPID"></script>
<script type="text/javascript" src="$sbRoot/js/rootDirs.js?$sbPID"></script>
<div id="config"> <div id="config">
<div id="config-content"> <div id="config-content">
@ -167,6 +168,14 @@
</label> </label>
</div> </div>
<div class="field-pair">
<label class="clearfix">
<span class="component-title">Show Root Directories</span>
<span class="component-desc">Set root directories for where you want your shows to be.</span>
#include $os.path.join($sickbeard.PROG_DIR, "gui/slick/interfaces/default/inc_rootDirs.tmpl")
</label>
</div>
<input type="submit" class="btn config_submitter" value="Save Changes" /> <input type="submit" class="btn config_submitter" value="Save Changes" />
</fieldset> </fieldset>
</div><!-- /component-group1 //--> </div><!-- /component-group1 //-->

View file

@ -27,6 +27,7 @@
#set $cur_index = $root_dir_list.index($cur_dir) #set $cur_index = $root_dir_list.index($cur_dir)
<div style="padding: 6px 0 3px 25px;"> <div style="padding: 6px 0 3px 25px;">
<input class="btn edit_root_dir" type="button" class="edit_root_dir" id="edit_root_dir_$cur_index" value="Edit" /> <input class="btn edit_root_dir" type="button" class="edit_root_dir" id="edit_root_dir_$cur_index" value="Edit" />
<input class="btn delete_root_dir" type="button" class="delete_root_dir" id="delete_root_dir_$cur_index" value="Delete" />
$cur_dir => <span id="display_new_root_dir_$cur_index">$cur_dir</span> $cur_dir => <span id="display_new_root_dir_$cur_index">$cur_dir</span>
</div> </div>
<input type="hidden" name="orig_root_dir_$cur_index" value="$cur_dir" /> <input type="hidden" name="orig_root_dir_$cur_index" value="$cur_dir" />

View file

@ -22,4 +22,10 @@ $(document).ready(function(){
}); });
$('.delete_root_dir').click(function(){
var cur_id = find_dir_index($(this).attr('id'));
$('#new_root_dir_'+cur_id).val(null);
$('#display_new_root_dir_'+cur_id).html('<b>DELETED</b>');
});
}); });

View file

@ -40,67 +40,37 @@ class DailySearcher():
self.amActive = True self.amActive = True
didSearch = False logger.log(u"Searching for coming episodes and 1 weeks worth of previously WANTED episodes ...")
providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive() and x.enable_daily] curDate = datetime.date.today().toordinal()
for curProviderCount, curProvider in enumerate(providers):
logger.log(u"Updating [" + curProvider.name + "] RSS cache ...") myDB = db.DBConnection()
sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE status = ? AND season > 0 AND airdate <= ?",
[common.UNAIRED, curDate])
sql_l = []
for sqlEp in sqlResults:
try: try:
curProvider.cache.updateCache() show = helpers.findCertainShow(sickbeard.showList, int(sqlEp["showid"]))
except exceptions.AuthException, e: except exceptions.MultipleShowObjectsException:
logger.log(u"Authentication error: " + ex(e), logger.ERROR) logger.log(u"ERROR: expected to find a single show matching " + sqlEp["showid"])
continue
except Exception, e:
logger.log(u"Error while updating cache for " + curProvider.name + ", skipping: " + ex(e), logger.ERROR)
logger.log(traceback.format_exc(), logger.DEBUG)
continue continue
didSearch = True ep = show.getEpisode(int(sqlEp["season"]), int(sqlEp["episode"]))
with ep.lock:
if ep.show.paused:
ep.status = common.SKIPPED
else:
ep.status = common.WANTED
if didSearch: sql_l.append(ep.get_sql())
logger.log(u"Searching for coming episodes and 1 weeks worth of previously WANTED episodes ...")
fromDate = datetime.date.today() - datetime.timedelta(weeks=1)
curDate = datetime.date.today()
if len(sql_l) > 0:
myDB = db.DBConnection() myDB = db.DBConnection()
sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE status in (?,?) AND airdate >= ? AND airdate <= ?", myDB.mass_action(sql_l)
[common.UNAIRED, common.WANTED, fromDate.toordinal(), curDate.toordinal()])
sql_l = [] # queue episode for daily search
for sqlEp in sqlResults: dailysearch_queue_item = sickbeard.search_queue.DailySearchQueueItem()
try: sickbeard.searchQueueScheduler.action.add_item(dailysearch_queue_item)
show = helpers.findCertainShow(sickbeard.showList, int(sqlEp["showid"]))
except exceptions.MultipleShowObjectsException:
logger.log(u"ERROR: expected to find a single show matching " + sqlEp["showid"])
continue
ep = show.getEpisode(int(sqlEp["season"]), int(sqlEp["episode"]))
with ep.lock:
if ep.show.paused:
ep.status = common.SKIPPED
if ep.status == common.UNAIRED:
logger.log(u"New episode " + ep.prettyName() + " airs today, setting status to WANTED")
ep.status = common.WANTED
sql_l.append(ep.get_sql())
if ep.status == common.WANTED:
dailysearch_queue_item = sickbeard.search_queue.DailySearchQueueItem(show, [ep])
sickbeard.searchQueueScheduler.action.add_item(dailysearch_queue_item)
else:
logger.log(u"Could not find any wanted episodes for the last 7 days to search for")
if len(sql_l) > 0:
myDB = db.DBConnection()
myDB.mass_action(sql_l)
else:
logger.log(
u"No NZB/Torrent providers found or enabled in the sickrage config. Please check your settings.",
logger.ERROR)
self.amActive = False self.amActive = False

View file

@ -210,21 +210,17 @@ def _remove_file_failed(file):
except: except:
pass pass
def findCertainShow(showList, indexerid): def findCertainShow(showList, indexerid):
if not showList:
return None
results = [] results = []
if indexerid: if showList and indexerid:
results = filter(lambda x: int(x.indexerid) == int(indexerid), showList) results = filter(lambda x: int(x.indexerid) == int(indexerid), showList)
if len(results): if len(results) == 1:
return results[0] return results[0]
elif len(results) > 1: elif len(results) > 1:
raise MultipleShowObjectsException() raise MultipleShowObjectsException()
def makeDir(path): def makeDir(path):
if not ek.ek(os.path.isdir, path): if not ek.ek(os.path.isdir, path):
try: try:

View file

@ -133,7 +133,7 @@ class AnimezbCache(tvcache.TVCache):
# only poll Animezb every 20 minutes max # only poll Animezb every 20 minutes max
self.minTime = 20 self.minTime = 20
def _getDailyData(self): def _getRSSData(self):
params = { params = {
"cat": "anime".encode('utf-8'), "cat": "anime".encode('utf-8'),

View file

@ -274,7 +274,7 @@ class BitSoupCache(tvcache.TVCache):
# only poll TorrentBytes every 20 minutes max # only poll TorrentBytes every 20 minutes max
self.minTime = 20 self.minTime = 20
def _getDailyData(self): def _getRSSData(self):
search_params = {'RSS': ['']} search_params = {'RSS': ['']}
return self.provider._doSearch(search_params) return self.provider._doSearch(search_params)

View file

@ -297,7 +297,7 @@ class BTNCache(tvcache.TVCache):
# At least 15 minutes between queries # At least 15 minutes between queries
self.minTime = 15 self.minTime = 15
def _getDailyData(self): def _getRSSData(self):
# Get the torrents uploaded since last check. # Get the torrents uploaded since last check.
seconds_since_last_update = math.ceil(time.time() - time.mktime(self._getLastUpdate().timetuple())) seconds_since_last_update = math.ceil(time.time() - time.mktime(self._getLastUpdate().timetuple()))

View file

@ -174,7 +174,7 @@ class EZRSSCache(tvcache.TVCache):
# only poll EZRSS every 15 minutes max # only poll EZRSS every 15 minutes max
self.minTime = 15 self.minTime = 15
def _getDailyData(self): def _getRSSData(self):
rss_url = self.provider.url + 'feed/' rss_url = self.provider.url + 'feed/'
logger.log(self.provider.name + " cache update URL: " + rss_url, logger.DEBUG) logger.log(self.provider.name + " cache update URL: " + rss_url, logger.DEBUG)

View file

@ -128,7 +128,7 @@ class FanzubCache(tvcache.TVCache):
# only poll Fanzub every 20 minutes max # only poll Fanzub every 20 minutes max
self.minTime = 20 self.minTime = 20
def _getDailyData(self): def _getRSSData(self):
params = { params = {
"cat": "anime".encode('utf-8'), "cat": "anime".encode('utf-8'),

View file

@ -306,7 +306,7 @@ class FreshOnTVCache(tvcache.TVCache):
# poll delay in minutes # poll delay in minutes
self.minTime = 20 self.minTime = 20
def _getDailyData(self): def _getRSSData(self):
search_params = {'RSS': ['']} search_params = {'RSS': ['']}
return self.provider._doSearch(search_params) return self.provider._doSearch(search_params)

View file

@ -195,8 +195,8 @@ class GenericProvider:
return True return True
def searchRSS(self, episodes): def searchRSS(self):
return self.cache.findNeededEpisodes(episodes) return self.cache.findNeededEpisodes()
def getQuality(self, item, anime=False): def getQuality(self, item, anime=False):
""" """
@ -255,10 +255,15 @@ class GenericProvider:
searched_scene_season = None searched_scene_season = None
for epObj in episodes: for epObj in episodes:
# check cache for results # search cache for episode result
cacheResult = self.cache.searchCache([epObj], manualSearch) cacheResult = self.cache.searchCache(epObj, manualSearch)
if len(cacheResult): if cacheResult:
results.update({epObj.episode: cacheResult[epObj]}) if epObj not in results:
results = [cacheResult]
else:
results.append(cacheResult)
# found result, search next episode
continue continue
# skip if season already searched # skip if season already searched

View file

@ -204,7 +204,7 @@ class HDBitsCache(tvcache.TVCache):
# only poll HDBits every 15 minutes max # only poll HDBits every 15 minutes max
self.minTime = 15 self.minTime = 15
def _getDailyData(self): def _getRSSData(self):
parsedJSON = self.provider.getURL(self.provider.rss_url, post_data=self.provider._make_post_data_JSON(), json=True) parsedJSON = self.provider.getURL(self.provider.rss_url, post_data=self.provider._make_post_data_JSON(), json=True)
if not self.provider._checkAuthFromData(parsedJSON): if not self.provider._checkAuthFromData(parsedJSON):

View file

@ -336,7 +336,7 @@ class HDTorrentsCache(tvcache.TVCache):
# only poll HDTorrents every 10 minutes max # only poll HDTorrents every 10 minutes max
self.minTime = 20 self.minTime = 20
def _getDailyData(self): def _getRSSData(self):
search_params = {'RSS': []} search_params = {'RSS': []}
return self.provider._doSearch(search_params) return self.provider._doSearch(search_params)

View file

@ -276,7 +276,7 @@ class IPTorrentsCache(tvcache.TVCache):
# Only poll IPTorrents every 10 minutes max # Only poll IPTorrents every 10 minutes max
self.minTime = 10 self.minTime = 10
def _getDailyData(self): def _getRSSData(self):
search_params = {'RSS': ['']} search_params = {'RSS': ['']}
return self.provider._doSearch(search_params) return self.provider._doSearch(search_params)

View file

@ -355,7 +355,7 @@ class KATCache(tvcache.TVCache):
# only poll ThePirateBay every 10 minutes max # only poll ThePirateBay every 10 minutes max
self.minTime = 20 self.minTime = 20
def _getDailyData(self): def _getRSSData(self):
search_params = {'RSS': ['rss']} search_params = {'RSS': ['rss']}
return self.provider._doSearch(search_params) return self.provider._doSearch(search_params)

View file

@ -337,21 +337,18 @@ class NewznabCache(tvcache.TVCache):
def updateCache(self): def updateCache(self):
# delete anything older then 7 days if self.shouldUpdate() and self._checkAuth(None):
self._clearCache()
if not self.shouldUpdate():
return
if self._checkAuth(None):
data = self._getRSSData() data = self._getRSSData()
# as long as the http request worked we count this as an update # as long as the http request worked we count this as an update
if data: if not data:
self.setLastUpdate()
else:
return [] return []
self.setLastUpdate()
# clear cache
self._clearCache()
if self._checkAuth(data): if self._checkAuth(data):
items = data.entries items = data.entries
cl = [] cl = []
@ -370,7 +367,6 @@ class NewznabCache(tvcache.TVCache):
return [] return []
# overwrite method with that parses the rageid from the newznab feed # overwrite method with that parses the rageid from the newznab feed
def _parseItem(self, item): def _parseItem(self, item):
title = item.title title = item.title

View file

@ -318,7 +318,7 @@ class NextGenCache(tvcache.TVCache):
# Only poll NextGen every 10 minutes max # Only poll NextGen every 10 minutes max
self.minTime = 10 self.minTime = 10
def _getDailyData(self): def _getRSSData(self):
search_params = {'RSS': ['']} search_params = {'RSS': ['']}
return self.provider._doSearch(search_params) return self.provider._doSearch(search_params)

View file

@ -126,7 +126,7 @@ class NyaaCache(tvcache.TVCache):
# only poll NyaaTorrents every 15 minutes max # only poll NyaaTorrents every 15 minutes max
self.minTime = 15 self.minTime = 15
def _getDailyData(self): def _getRSSData(self):
params = { params = {
"page": 'rss', # Use RSS page "page": 'rss', # Use RSS page
"order": '1', # Sort Descending By Date "order": '1', # Sort Descending By Date

View file

@ -174,7 +174,7 @@ class OmgwtfnzbsCache(tvcache.TVCache):
return (title, url) return (title, url)
def _getDailyData(self): def _getRSSData(self):
params = {'user': provider.username, params = {'user': provider.username,
'api': provider.api_key, 'api': provider.api_key,
'eng': 1, 'eng': 1,

View file

@ -102,7 +102,7 @@ class TorrentRssProvider(generic.TorrentProvider):
if not cookie_validator.match(self.cookies): if not cookie_validator.match(self.cookies):
return (False, 'Cookie is not correctly formatted: ' + self.cookies) return (False, 'Cookie is not correctly formatted: ' + self.cookies)
items = self.cache._getDailyData() items = self.cache._getRSSData()
if not len(items) > 0: if not len(items) > 0:
return (False, 'No items found in the RSS feed ' + self.url) return (False, 'No items found in the RSS feed ' + self.url)
@ -157,7 +157,7 @@ class TorrentRssCache(tvcache.TVCache):
tvcache.TVCache.__init__(self, provider) tvcache.TVCache.__init__(self, provider)
self.minTime = 15 self.minTime = 15
def _getDailyData(self): def _getRSSData(self):
logger.log(u"TorrentRssCache cache update URL: " + self.provider.url, logger.DEBUG) logger.log(u"TorrentRssCache cache update URL: " + self.provider.url, logger.DEBUG)
request_headers = None request_headers = None

View file

@ -312,7 +312,7 @@ class SCCCache(tvcache.TVCache):
# only poll SCC every 10 minutes max # only poll SCC every 10 minutes max
self.minTime = 20 self.minTime = 20
def _getDailyData(self): def _getRSSData(self):
search_params = {'RSS': ['']} search_params = {'RSS': ['']}
return self.provider._doSearch(search_params) return self.provider._doSearch(search_params)

View file

@ -252,7 +252,7 @@ class SpeedCDCache(tvcache.TVCache):
# only poll Speedcd every 20 minutes max # only poll Speedcd every 20 minutes max
self.minTime = 20 self.minTime = 20
def _getDailyData(self): def _getRSSData(self):
search_params = {'RSS': ['']} search_params = {'RSS': ['']}
return self.provider._doSearch(search_params) return self.provider._doSearch(search_params)

View file

@ -338,7 +338,7 @@ class ThePirateBayCache(tvcache.TVCache):
# only poll ThePirateBay every 10 minutes max # only poll ThePirateBay every 10 minutes max
self.minTime = 20 self.minTime = 20
def _getDailyData(self): def _getRSSData(self):
search_params = {'RSS': ['rss']} search_params = {'RSS': ['rss']}
return self.provider._doSearch(search_params) return self.provider._doSearch(search_params)

View file

@ -274,7 +274,7 @@ class TorrentBytesCache(tvcache.TVCache):
# only poll TorrentBytes every 20 minutes max # only poll TorrentBytes every 20 minutes max
self.minTime = 20 self.minTime = 20
def _getDailyData(self): def _getRSSData(self):
search_params = {'RSS': ['']} search_params = {'RSS': ['']}
return self.provider._doSearch(search_params) return self.provider._doSearch(search_params)

View file

@ -277,7 +277,7 @@ class TorrentDayCache(tvcache.TVCache):
# Only poll IPTorrents every 10 minutes max # Only poll IPTorrents every 10 minutes max
self.minTime = 10 self.minTime = 10
def _getDailyData(self): def _getRSSData(self):
search_params = {'RSS': ['']} search_params = {'RSS': ['']}
return self.provider._doSearch(search_params) return self.provider._doSearch(search_params)

View file

@ -275,7 +275,7 @@ class TorrentLeechCache(tvcache.TVCache):
# only poll TorrentLeech every 20 minutes max # only poll TorrentLeech every 20 minutes max
self.minTime = 20 self.minTime = 20
def _getDailyData(self): def _getRSSData(self):
search_params = {'RSS': ['']} search_params = {'RSS': ['']}
return self.provider._doSearch(search_params) return self.provider._doSearch(search_params)

View file

@ -83,7 +83,7 @@ class TvTorrentsCache(tvcache.TVCache):
# only poll TvTorrents every 15 minutes max # only poll TvTorrents every 15 minutes max
self.minTime = 15 self.minTime = 15
def _getDailyData(self): def _getRSSData(self):
# These will be ignored on the serverside. # These will be ignored on the serverside.
ignore_regex = "all.month|month.of|season[\s\d]*complete" ignore_regex = "all.month|month.of|season[\s\d]*complete"

View file

@ -53,7 +53,7 @@ def get_scene_numbering(indexer_id, indexer, season, episode, fallback_to_xem=Tr
return (season, episode) return (season, episode)
showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(indexer_id)) showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(indexer_id))
if not showObj.is_scene: if showObj and not showObj.is_scene:
return (season, episode) return (season, episode)
result = find_scene_numbering(int(indexer_id), int(indexer), season, episode) result = find_scene_numbering(int(indexer_id), int(indexer), season, episode)
@ -105,7 +105,7 @@ def get_scene_absolute_numbering(indexer_id, indexer, absolute_number, fallback_
indexer = int(indexer) indexer = int(indexer)
showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, indexer_id) showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, indexer_id)
if not showObj.is_scene: if showObj and not showObj.is_scene:
return absolute_number return absolute_number
result = find_scene_absolute_numbering(indexer_id, indexer, absolute_number) result = find_scene_absolute_numbering(indexer_id, indexer, absolute_number)

View file

@ -336,7 +336,7 @@ def filterSearchResults(show, season, results):
return foundResults return foundResults
def searchForNeededEpisodes(show, episodes): def searchForNeededEpisodes():
foundResults = {} foundResults = {}
didSearch = False didSearch = False
@ -344,15 +344,13 @@ def searchForNeededEpisodes(show, episodes):
origThreadName = threading.currentThread().name origThreadName = threading.currentThread().name
providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive() and x.enable_daily] providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive() and x.enable_daily]
for curProviderCount, curProvider in enumerate(providers): for curProvider in providers:
if curProvider.anime_only and not show.is_anime:
logger.log(u"" + str(show.name) + " is not an anime, skiping", logger.DEBUG)
continue
threading.currentThread().name = origThreadName + " :: [" + curProvider.name + "]" threading.currentThread().name = origThreadName + " :: [" + curProvider.name + "]"
try: try:
curFoundResults = curProvider.searchRSS(episodes) curProvider.cache.updateCache()
curFoundResults = curProvider.searchRSS()
except exceptions.AuthException, e: except exceptions.AuthException, e:
logger.log(u"Authentication error: " + ex(e), logger.ERROR) logger.log(u"Authentication error: " + ex(e), logger.ERROR)
continue continue
@ -374,6 +372,12 @@ def searchForNeededEpisodes(show, episodes):
logger.DEBUG) logger.DEBUG)
continue continue
# find the best result for the current episode
bestResult = None
for curResult in curFoundResults[curEp]:
if not bestResult or bestResult.quality < curResult.quality:
bestResult = curResult
bestResult = pickBestResult(curFoundResults[curEp], curEp.show) bestResult = pickBestResult(curFoundResults[curEp], curEp.show)
# if all results were rejected move on to the next episode # if all results were rejected move on to the next episode
@ -400,7 +404,7 @@ def searchForNeededEpisodes(show, episodes):
u"No NZB/Torrent providers found or enabled in the sickrage config for daily searches. Please check your settings.", u"No NZB/Torrent providers found or enabled in the sickrage config for daily searches. Please check your settings.",
logger.ERROR) logger.ERROR)
return foundResults.values() if len(foundResults) else {} return foundResults.values()
def searchProviders(show, season, episodes, manualSearch=False): def searchProviders(show, season, episodes, manualSearch=False):
@ -409,6 +413,9 @@ def searchProviders(show, season, episodes, manualSearch=False):
didSearch = False didSearch = False
# build name cache for show
sickbeard.name_cache.buildNameCache(show)
# check if we want to search for season packs instead of just season/episode # check if we want to search for season packs instead of just season/episode
seasonSearch = False seasonSearch = False
if not manualSearch: if not manualSearch:
@ -442,6 +449,7 @@ def searchProviders(show, season, episodes, manualSearch=False):
logger.log(u"Searching for episodes we need from " + show.name + " Season " + str(season)) logger.log(u"Searching for episodes we need from " + show.name + " Season " + str(season))
try: try:
curProvider.cache.updateCache()
searchResults = curProvider.findSearchResults(show, season, episodes, search_mode, manualSearch) searchResults = curProvider.findSearchResults(show, season, episodes, search_mode, manualSearch)
except exceptions.AuthException, e: except exceptions.AuthException, e:
logger.log(u"Authentication error: " + ex(e), logger.ERROR) logger.log(u"Authentication error: " + ex(e), logger.ERROR)

View file

@ -29,6 +29,7 @@ from sickbeard import logger
from sickbeard import ui from sickbeard import ui
from sickbeard import common from sickbeard import common
class BacklogSearchScheduler(scheduler.Scheduler): class BacklogSearchScheduler(scheduler.Scheduler):
def forceSearch(self): def forceSearch(self):
self.action._set_lastBacklog(1) self.action._set_lastBacklog(1)
@ -40,11 +41,12 @@ class BacklogSearchScheduler(scheduler.Scheduler):
else: else:
return datetime.date.fromordinal(self.action._lastBacklog + self.action.cycleTime) return datetime.date.fromordinal(self.action._lastBacklog + self.action.cycleTime)
class BacklogSearcher: class BacklogSearcher:
def __init__(self): def __init__(self):
self._lastBacklog = self._get_lastBacklog() self._lastBacklog = self._get_lastBacklog()
self.cycleTime = sickbeard.BACKLOG_FREQUENCY/60/24 self.cycleTime = sickbeard.BACKLOG_FREQUENCY / 60 / 24
self.lock = threading.Lock() self.lock = threading.Lock()
self.amActive = False self.amActive = False
self.amPaused = False self.amPaused = False
@ -99,7 +101,7 @@ class BacklogSearcher:
if len(segments): if len(segments):
backlog_queue_item = search_queue.BacklogQueueItem(curShow, segments) backlog_queue_item = search_queue.BacklogQueueItem(curShow, segments)
sickbeard.searchQueueScheduler.action.add_item(backlog_queue_item) #@UndefinedVariable sickbeard.searchQueueScheduler.action.add_item(backlog_queue_item) # @UndefinedVariable
else: else:
logger.log(u"Nothing needs to be downloaded for " + str(curShow.name) + ", skipping this season", logger.log(u"Nothing needs to be downloaded for " + str(curShow.name) + ", skipping this season",
logger.DEBUG) logger.DEBUG)
@ -132,14 +134,14 @@ class BacklogSearcher:
return self._lastBacklog return self._lastBacklog
def _get_segments(self, show, fromDate): def _get_segments(self, show, fromDate):
anyQualities, bestQualities = common.Quality.splitQuality(show.quality) #@UnusedVariable anyQualities, bestQualities = common.Quality.splitQuality(show.quality) # @UnusedVariable
logger.log(u"Seeing if we need anything from " + show.name) logger.log(u"Seeing if we need anything from " + show.name)
myDB = db.DBConnection() myDB = db.DBConnection()
if show.air_by_date: if show.air_by_date:
sqlResults = myDB.select( sqlResults = myDB.select(
"SELECT ep.status, ep.season, ep.episode FROM tv_episodes ep, tv_shows show WHERE season != 0 AND ep.showid = show.indexer_id AND show.paused = 0 ANd ep.airdate > ? AND ep.showid = ? AND show.air_by_date = 1", "SELECT ep.status, ep.season, ep.episode FROM tv_episodes ep, tv_shows show WHERE season != 0 AND ep.showid = show.indexer_id AND show.paused = 0 AND ep.airdate > ? AND ep.showid = ? AND show.air_by_date = 1",
[fromDate.toordinal(), show.indexerid]) [fromDate.toordinal(), show.indexerid])
else: else:
sqlResults = myDB.select( sqlResults = myDB.select(

View file

@ -72,34 +72,27 @@ class SearchQueue(generic_queue.GenericQueue):
return False return False
def add_item(self, item): def add_item(self, item):
if isinstance(item, DailySearchQueueItem) or (
if isinstance(item, (DailySearchQueueItem, BacklogQueueItem, ManualSearchQueueItem, FailedQueueItem)) \ isinstance(item, (BacklogQueueItem, ManualSearchQueueItem, FailedQueueItem)) and not self.is_in_queue(
and not self.is_in_queue(item.show, item.segment): item.show, item.segment)):
sickbeard.name_cache.buildNameCache(item.show)
generic_queue.GenericQueue.add_item(self, item) generic_queue.GenericQueue.add_item(self, item)
else: else:
logger.log(u"Not adding item, it's already in the queue", logger.DEBUG) logger.log(u"Not adding item, it's already in the queue", logger.DEBUG)
class DailySearchQueueItem(generic_queue.QueueItem): class DailySearchQueueItem(generic_queue.QueueItem):
def __init__(self, show, segment): def __init__(self):
generic_queue.QueueItem.__init__(self, 'Daily Search', DAILY_SEARCH) generic_queue.QueueItem.__init__(self, 'Daily Search', DAILY_SEARCH)
self.priority = generic_queue.QueuePriorities.HIGH
self.name = 'DAILYSEARCH-' + str(show.indexerid)
self.show = show
self.segment = segment
def run(self): def run(self):
generic_queue.QueueItem.run(self) generic_queue.QueueItem.run(self)
try: try:
logger.log("Beginning daily search for new episodes")
logger.log("Beginning daily search for [" + self.show.name + "]") foundResults = search.searchForNeededEpisodes()
foundResults = search.searchForNeededEpisodes(self.show, self.segment)
if not len(foundResults): if not len(foundResults):
logger.log(u"No needed episodes found during daily search for [" + self.show.name + "]") logger.log(u"No needed episodes found")
else: else:
for result in foundResults: for result in foundResults:
# just use the first result for now # just use the first result for now
@ -115,6 +108,7 @@ class DailySearchQueueItem(generic_queue.QueueItem):
self.finish() self.finish()
class ManualSearchQueueItem(generic_queue.QueueItem): class ManualSearchQueueItem(generic_queue.QueueItem):
def __init__(self, show, segment): def __init__(self, show, segment):
generic_queue.QueueItem.__init__(self, 'Manual Search', MANUAL_SEARCH) generic_queue.QueueItem.__init__(self, 'Manual Search', MANUAL_SEARCH)
@ -169,7 +163,7 @@ class BacklogQueueItem(generic_queue.QueueItem):
try: try:
for season in self.segment: for season in self.segment:
sickbeard.searchBacklog.BacklogSearcher.currentSearchInfo = { sickbeard.searchBacklog.BacklogSearcher.currentSearchInfo = {
'title': self.show.name + " Season " + str(season)} 'title': self.show.name + " Season " + str(season)}
wantedEps = self.segment[season] wantedEps = self.segment[season]

View file

@ -89,26 +89,17 @@ class TVCache():
def _clearCache(self): def _clearCache(self):
if self.shouldClearCache(): if self.shouldClearCache():
logger.log(u"Clearing items older than 1 week from " + self.provider.name + " cache")
curDate = datetime.date.today() - datetime.timedelta(weeks=1)
myDB = self._getDB() myDB = self._getDB()
myDB.action("DELETE FROM [" + self.providerID + "] WHERE time < ?", [int(time.mktime(curDate.timetuple()))]) myDB.action("DELETE FROM [" + self.providerID + "] WHERE 1")
def _get_title_and_url(self, item): def _get_title_and_url(self, item):
# override this in the provider if daily search has a different data layout to backlog searches # override this in the provider if daily search has a different data layout to backlog searches
return self.provider._get_title_and_url(item) return self.provider._get_title_and_url(item)
def _getRSSData(self): def _getRSSData(self):
data = None data = None
return data return data
def _getDailyData(self):
return None
def _checkAuth(self): def _checkAuth(self):
return self.provider._checkAuth() return self.provider._checkAuth()
@ -116,10 +107,9 @@ class TVCache():
return True return True
def updateCache(self): def updateCache(self):
if self.shouldUpdate() and self._checkAuth(): if self.shouldUpdate() and self._checkAuth():
# as long as the http request worked we count this as an update # as long as the http request worked we count this as an update
data = self._getDailyData() data = self._getRSSData()
if not data: if not data:
return [] return []
@ -289,9 +279,9 @@ class TVCache():
[name, season, episodeText, parse_result.show.indexerid, url, curTimestamp, quality, release_group, version]] [name, season, episodeText, parse_result.show.indexerid, url, curTimestamp, quality, release_group, version]]
def searchCache(self, episodes, manualSearch=False): def searchCache(self, episode, manualSearch=False):
neededEps = self.findNeededEpisodes(episodes, manualSearch) neededEps = self.findNeededEpisodes(episode, manualSearch)
return neededEps return neededEps[episode]
def listPropers(self, date=None, delimiter="."): def listPropers(self, date=None, delimiter="."):
myDB = self._getDB() myDB = self._getDB()
@ -303,69 +293,84 @@ class TVCache():
return filter(lambda x: x['indexerid'] != 0, myDB.select(sql)) return filter(lambda x: x['indexerid'] != 0, myDB.select(sql))
def findNeededEpisodes(self, episodes, manualSearch=False): def findNeededEpisodes(self, episode=None, manualSearch=False):
neededEps = {} neededEps = {}
for epObj in episodes: if episode:
myDB = self._getDB() neededEps[episode] = []
myDB = self._getDB()
if not episode:
sqlResults = myDB.select("SELECT * FROM [" + self.providerID + "]")
else:
sqlResults = myDB.select( sqlResults = myDB.select(
"SELECT * FROM [" + self.providerID + "] WHERE indexerid = ? AND season = ? AND episodes LIKE ?", "SELECT * FROM [" + self.providerID + "] WHERE indexerid = ? AND season = ? AND episodes LIKE ?",
[epObj.show.indexerid, epObj.season, "%|" + str(epObj.episode) + "|%"]) [episode.show.indexerid, episode.season, "%|" + str(episode.episode) + "|%"])
# for each cache entry # for each cache entry
for curResult in sqlResults: for curResult in sqlResults:
# skip non-tv crap (but allow them for Newzbin cause we assume it's filtered well) # skip non-tv crap
if self.providerID != 'newzbin' and not show_name_helpers.filterBadReleases(curResult["name"]): if not show_name_helpers.filterBadReleases(curResult["name"]):
continue continue
# get the show object, or if it's not one of our shows then ignore it # get the show object, or if it's not one of our shows then ignore it
try: showObj = helpers.findCertainShow(sickbeard.showList, int(curResult["indexerid"]))
showObj = helpers.findCertainShow(sickbeard.showList, int(curResult["indexerid"])) if not showObj:
except MultipleShowObjectsException: continue
showObj = None
if not showObj: # skip if provider is anime only and show is not anime
continue if self.provider.anime_only and not showObj.is_anime:
logger.log(u"" + str(showObj.name) + " is not an anime, skiping", logger.DEBUG)
continue
# get season and ep data (ignoring multi-eps for now) # get season and ep data (ignoring multi-eps for now)
curSeason = int(curResult["season"]) curSeason = int(curResult["season"])
if curSeason == -1: if curSeason == -1:
continue continue
curEp = curResult["episodes"].split("|")[1] curEp = curResult["episodes"].split("|")[1]
if not curEp: if not curEp:
continue continue
curEp = int(curEp) curEp = int(curEp)
curQuality = int(curResult["quality"])
curReleaseGroup = curResult["release_group"]
curVersion = curResult["version"]
# if the show says we want that episode then add it to the list curQuality = int(curResult["quality"])
if not showObj.wantEpisode(curSeason, curEp, curQuality, manualSearch): curReleaseGroup = curResult["release_group"]
logger.log(u"Skipping " + curResult["name"] + " because we don't want an episode that's " + curVersion = curResult["version"]
Quality.qualityStrings[curQuality], logger.DEBUG)
continue
# build a result object # if the show says we want that episode then add it to the list
title = curResult["name"] if not showObj.wantEpisode(curSeason, curEp, curQuality, manualSearch):
url = curResult["url"] logger.log(u"Skipping " + curResult["name"] + " because we don't want an episode that's " +
Quality.qualityStrings[curQuality], logger.DEBUG)
continue
logger.log(u"Found result " + title + " at " + url) # build name cache for show
sickbeard.name_cache.buildNameCache(showObj)
result = self.provider.getResult([epObj]) if episode:
result.show = showObj epObj = episode
result.url = url else:
result.name = title epObj = showObj.getEpisode(curSeason, curEp)
result.quality = curQuality
result.release_group = curReleaseGroup
result.version = curVersion
result.content = None
# add it to the list # build a result object
if epObj not in neededEps: title = curResult["name"]
neededEps[epObj] = [result] url = curResult["url"]
else:
neededEps[epObj].append(result) logger.log(u"Found result " + title + " at " + url)
result = self.provider.getResult([epObj])
result.show = showObj
result.url = url
result.name = title
result.quality = curQuality
result.release_group = curReleaseGroup
result.version = curVersion
result.content = None
# add it to the list
if epObj not in neededEps:
neededEps[epObj] = [result]
else:
neededEps[epObj].append(result)
# datetime stamp this search so cache gets cleared # datetime stamp this search so cache gets cleared
self.setLastSearch() self.setLastSearch()