Fixed search results not being snatched when searching shows with custom quality settings.
Added an option under Manage Searches to force an RSS cache update, and the status of RSS updates and daily searches is now displayed at the bottom of pages.
parent ca913d965b
commit 8ac8150eb3
6 changed files with 42 additions and 14 deletions
@@ -14,6 +14,8 @@
 #set $numDLEpisodes = $myDB.select("SELECT COUNT(*) FROM tv_episodes WHERE status IN ("+",".join([str(x) for x in $Quality.DOWNLOADED + [$ARCHIVED]])+") AND season != 0 and episode != 0 AND airdate <= "+$today+"")[0][0]
 #set $numEpisodes = $myDB.select("SELECT COUNT(*) FROM tv_episodes WHERE season != 0 and episode != 0 AND (airdate != 1 OR status IN ("+",".join([str(x) for x in ($Quality.DOWNLOADED + $Quality.SNATCHED + $Quality.SNATCHED_PROPER) + [$ARCHIVED]])+")) AND airdate <= "+$today+" AND status != "+str($IGNORED)+"")[0][0]
 <b>$numShows shows</b> ($numGoodShows active) | <b>$numDLEpisodes/$numEpisodes</b> episodes downloaded |
+<b>RSS Update</b>: <%=str(sickbeard.updateRSSScheduler.timeLeft()).split('.')[0]%> |
+<b>Daily Search</b>: <%=str(sickbeard.dailySearchScheduler.timeLeft()).split('.')[0]%> |
 <b>Backlog</b>: $sbdatetime.sbdatetime.sbfdate($sickbeard.backlogSearchScheduler.nextRun())
 </div>
 <ul style="float:right;">
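The two new footer entries format the scheduler countdown by converting it to a string and splitting on the first '.', which strips the microseconds from a Python timedelta while leaving the hours:minutes:seconds part intact. A minimal sketch of that formatting, assuming timeLeft() returns a datetime.timedelta as the template usage suggests (format_time_left is a hypothetical helper name):

from datetime import timedelta

def format_time_left(delta):
    # str() on a timedelta gives e.g. "0:14:59.123456"; splitting on the
    # first "." keeps only the "H:MM:SS" part shown in the page footer
    return str(delta).split('.')[0]

print(format_time_left(timedelta(minutes=14, seconds=59, microseconds=123456)))  # 0:14:59
print(format_time_left(timedelta(hours=2)))                                      # 2:00:00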
@@ -35,6 +35,15 @@ In Progress<br />
 #end if
 <br />
 
+<h3>RSS Cache Update:</h3>
+<a class="btn" href="$sbRoot/manage/manageSearches/forceRSS"><i class="icon-exclamation-sign"></i> Force</a>
+#if not $rssStatus:
+Not in progress<br />
+#else:
+In Progress<br />
+#end if
+<br />
+
 <h3>Version Check:</h3>
 <a class="btn" href="$sbRoot/manage/manageSearches/forceVersionCheck"><i class="icon-check"></i> Force Check</a>
 <br />
lib/dateutil/zoneinfo/.gitignore (new file, vendored, 1 addition)
@@ -0,0 +1 @@
+*.tar.gz
@@ -31,6 +31,8 @@ class RSSUpdater():
         self.amActive = False
 
     def run(self):
         self.amActive = True
 
+        # remove names from cache that link back to active shows that we watch
+        sickbeard.name_cache.syncNameCache()
 
@@ -38,4 +40,6 @@ class RSSUpdater():
         providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive()]
         for provider in providers:
+            logger.log(u"Updating RSS cache for provider [" + provider.name + "]")
-            provider.cache.updateCache()
+            provider.cache.updateCache()
 
+        self.amActive = False
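The amActive flag toggled at the start and end of run() is what the new footer and Manage Searches status readouts poll. A minimal sketch of that pattern, with hypothetical provider objects standing in for SickGear's provider cache API; the try/finally guard is an addition here, not something the diff shows:

import logging

logger = logging.getLogger("rss_updater")

class RSSUpdater(object):
    def __init__(self):
        self.amActive = False  # read by the web UI to show "In Progress"

    def run(self, providers):
        self.amActive = True
        try:
            for provider in providers:
                # refresh each active provider's RSS cache in turn
                logger.info(u"Updating RSS cache for provider [%s]", provider.name)
                provider.cache.updateCache()
        finally:
            # clear the flag even on error so the UI never reports a
            # cache update as running forever
            self.amActive = False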
@@ -534,23 +534,23 @@ def searchProviders(queueItem, show, season, episodes, seasonSearch=False, manua
                 continue
 
             # add result if its not a duplicate and
-            found = False
-            for i, result in enumerate(queueItem.results):
-                for bestResultEp in bestResult.episodes:
-                    if bestResultEp in result.episodes:
-                        if result.quality < bestResult.quality:
-                            queueItem.results.pop(i)
-                        else:
-                            found = True
-            if not found:
-                queueItem.results += [bestResult]
+            if isFinalResult(bestResult):
+                found = False
+                for i, result in enumerate(queueItem.results):
+                    for bestResultEp in bestResult.episodes:
+                        if bestResultEp in result.episodes:
+                            if result.quality < bestResult.quality:
+                                queueItem.results.pop(i)
+                            else:
+                                found = True
+                if not found:
+                    queueItem.results += [bestResult]
 
 
         # check that we got all the episodes we wanted first before doing a match and snatch
         wantedEpCount = 0
         for wantedEp in episodes:
            for result in queueItem.results:
-                if wantedEp in result.episodes:
+                if wantedEp in result.episodes and isFinalResult(result):
                     wantedEpCount += 1
 
         # make sure we search every provider for results unless we found everything we wanted
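The duplicate check above scans the results already queued: a queued result that overlaps the new result's episodes at a lower quality is dropped, while an overlap at equal or better quality marks the new result as redundant. A self-contained sketch of that selection rule, using simplified stand-in types rather than SickGear's real search-result objects:

class FakeResult(object):
    """Stand-in for a provider search result: episode numbers plus a quality rank."""
    def __init__(self, episodes, quality):
        self.episodes = episodes
        self.quality = quality

def add_if_best(results, candidate):
    # Drop any queued result that overlaps the candidate's episodes at a
    # lower quality; if an overlapping result of equal or better quality
    # exists, the candidate is considered redundant and is not added.
    found = False
    for result in list(results):
        for ep in candidate.episodes:
            if ep in result.episodes:
                if result.quality < candidate.quality:
                    results.remove(result)
                else:
                    found = True
    if not found:
        results.append(candidate)
    return results

queue = [FakeResult([1], quality=10)]
add_if_best(queue, FakeResult([1], quality=20))   # replaces the quality-10 result
add_if_best(queue, FakeResult([2], quality=10))   # new episode, appended
print([(r.episodes, r.quality) for r in queue])   # [([1], 20), ([2], 10)]

One difference from the diff: the original pops from the list it is enumerating, which can skip the element that slides into the popped slot; the sketch iterates over a snapshot of the list to avoid that.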
@@ -207,6 +207,7 @@ class ManageSearches:
         t.backlogPaused = sickbeard.searchQueueScheduler.action.is_backlog_paused()  # @UndefinedVariable
         t.backlogRunning = sickbeard.searchQueueScheduler.action.is_backlog_in_progress()  # @UndefinedVariable
         t.searchStatus = sickbeard.dailySearchScheduler.action.amActive  # @UndefinedVariable
+        t.rssStatus = sickbeard.updateRSSScheduler.action.amActive  # @UndefinedVariable
 
         t.submenu = ManageMenu()
 
@@ -219,7 +220,18 @@ class ManageSearches:
         result = sickbeard.dailySearchScheduler.forceRun()
         if result:
             logger.log(u"Daily search forced")
-            ui.notifications.message('Daily search started',
+            ui.notifications.message('Daily search for new releases started')
 
         redirect("/manage/manageSearches/")
 
+    @cherrypy.expose
+    def forceRSS(self):
+
+        # force it to run the next time it looks
+        result = sickbeard.updateRSSScheduler.forceRun()
+        if result:
+            logger.log(u"RSS cache update forced")
+            ui.notifications.message('RSS cache update started',
+                                     'Note: RSS feeds may not be updated if retrieved recently')
+
+        redirect("/manage/manageSearches/")
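forceRSS only logs and raises a notification when forceRun() accepts the request, which is the same contract the daily-search force handler relies on. The scheduler internals are not part of this diff, so the sketch below is an assumption about how such a forceRun()/timeLeft() pair might behave: decline while a run is already active, otherwise rewind the last-run time so the next tick fires immediately.

import datetime

class MiniScheduler(object):
    """Hypothetical stripped-down scheduler; not SickGear's real Scheduler class."""

    def __init__(self, cycle):
        self.cycleTime = cycle                  # how often the job normally runs
        self.lastRun = datetime.datetime.now()
        self.action_active = False              # stands in for action.amActive

    def timeLeft(self):
        # time remaining until the next scheduled run, as a timedelta
        return self.cycleTime - (datetime.datetime.now() - self.lastRun)

    def forceRun(self):
        # refuse to queue another pass while one is already in flight
        if self.action_active:
            return False
        # rewind the clock so the next scheduler tick runs the job at once
        self.lastRun = datetime.datetime.now() - self.cycleTime
        return True

sched = MiniScheduler(datetime.timedelta(minutes=15))
print("next run in", str(sched.timeLeft()).split('.')[0])   # e.g. 0:14:59
if sched.forceRun():
    print("RSS cache update queued for the next scheduler tick")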