Mirror of https://github.com/SickGear/SickGear.git (synced 2024-12-03 18:03:37 +00:00)

Merge pull request #348 from Prinz23/feature/ChangeShowSearchQueue

Show and Search Queue changes

Commit 8da6f15d2d: 21 changed files with 1112 additions and 613 deletions

12  CHANGES.md
@@ -1,4 +1,4 @@
### 0.x.x (2015-xx-xx xx:xx:xx UTC)

* Update Tornado webserver to 4.2.dev1 (609dbb9)
* Update change to suppress reporting of Tornado exception error 1 to updated package as listed in hacks.txt

@@ -49,6 +49,16 @@
* Add clarity to the output of a successful post process but with some issues rather than "there were problems"
* Add a conclusive bottom line to the pp result report
* Change helpers doctests to unittests
* Add Search Queue Overview page
* Add expandable search queue details on the Manage Searches page
* Fix failed status episodes not included in next_episode search function
* Change prevent another show update from running if one is already running
* Change split Force backlog button on the Manage Searches page into: Force Limited, Force Full
* Change refactor properFinder to be part of the search
* Change improve threading of generic_queue, show_queue and search_queue
* Change disable the Force buttons on the Manage Searches page while a search is running
* Change disable the Pause buttons on the Manage Searches page if a search is not running
* Change staggered periods of testing and updating of all shows "ended" status up to 460 days

[develop changelog]
* Fix issue, when adding existing shows, set its default group to ensure it now appears on the show list page
@@ -442,6 +442,7 @@ inc_top.tmpl
content:"\e613"
}

.sgicon-showqueue:before,
.sgicon-refresh:before{
content:"\e614"
}

@@ -102,6 +102,7 @@
$('#SubMenu a:contains("Processing")').addClass('btn').html('<i class="sgicon-postprocess"></i>Post-Processing');
$('#SubMenu a:contains("Manage Searches")').addClass('btn').html('<i class="sgicon-search"></i>Manage Searches');
$('#SubMenu a:contains("Manage Torrents")').addClass('btn').html('<i class="sgicon-bittorrent"></i>Manage Torrents');
$('#SubMenu a:contains("Show Queue Overview")').addClass('btn').html('<i class="sgicon-showqueue"></i>Show Queue Overview');
$('#SubMenu a[href$="/manage/failedDownloads/"]').addClass('btn').html('<i class="sgicon-failed"></i>Failed Downloads');
$('#SubMenu a:contains("Notification")').addClass('btn').html('<i class="sgicon-notification"></i>Notifications');
$('#SubMenu a:contains("Update show in XBMC")').addClass('btn').html('<i class="sgicon-xbmc"></i>Update show in XBMC');

@@ -166,6 +167,7 @@
<li><a href="$sbRoot/manage/" tabindex="$tab#set $tab += 1#"><i class="sgicon-massupdate"></i>Mass Update</a></li>
<li><a href="$sbRoot/manage/backlogOverview/" tabindex="$tab#set $tab += 1#"><i class="sgicon-backlog"></i>Backlog Overview</a></li>
<li><a href="$sbRoot/manage/manageSearches/" tabindex="$tab#set $tab += 1#"><i class="sgicon-search"></i>Manage Searches</a></li>
<li><a href="$sbRoot/manage/showQueueOverview/" tabindex="$tab#set $tab += 1#"><i class="sgicon-showqueue"></i>Show Queue Overview</a></li>
<li><a href="$sbRoot/manage/episodeStatuses/" tabindex="$tab#set $tab += 1#"><i class="sgicon-episodestatus"></i>Episode Status Management</a></li>
#if $sickbeard.USE_PLEX and $sickbeard.PLEX_SERVER_HOST != ''
<li><a href="$sbRoot/home/updatePLEX/" tabindex="$tab#set $tab += 1#"><i class="sgicon-plex"></i>Update PLEX</a></li>
@@ -1,6 +1,4 @@
#import sickbeard
#import datetime
#from sickbeard.common import *
##
#set global $title = 'Manage Searches'
#set global $header = 'Manage Searches'

@@ -11,6 +9,7 @@
#include $os.path.join($sickbeard.PROG_DIR, 'gui/slick/interfaces/default/inc_top.tmpl')

<script type="text/javascript" src="$sbRoot/js/plotTooltip.js?$sbPID"></script>
<script type="text/javascript" src="$sbRoot/js/manageSearches.js?$sbPID"></script>
<div id="content800">
#if $varExists('header')
<h1 class="header">$header</h1>

@@ -20,18 +19,19 @@
<div id="summary2" class="align-left">
<h3>Backlog Search:</h3>
<a class="btn" href="$sbRoot/manage/manageSearches/forceBacklog"><i class="sgicon-play"></i> Force</a>
<a class="btn" href="$sbRoot/manage/manageSearches/pauseBacklog?paused=#if $backlogPaused then '0' else '1'#"><i class="#if $backlogPaused then 'sgicon-play' else 'sgicon-pause'#"></i> #if $backlogPaused then 'Unpause' else 'Pause'#</a>
<a id="forcebacklog" class="btn#if $backlogRunning# disabled#end if#" href="$sbRoot/manage/manageSearches/forceLimitedBacklog"><i class="sgicon-play"></i> Force Limited</a>
<a id="forcefullbacklog" class="btn#if $backlogRunning# disabled#end if#" href="$sbRoot/manage/manageSearches/forceFullBacklog"><i class="sgicon-play"></i> Force Full</a>
<a id="pausebacklog" class="btn#if not $backlogRunning# disabled#end if#" href="$sbRoot/manage/manageSearches/pauseBacklog?paused=#if $backlogPaused then "0" else "1"#"><i class="#if $backlogPaused then "sgicon-play" else "sgicon-pause"#"></i> #if $backlogPaused then "Unpause" else "Pause"#</a>
#if not $backlogRunning:
Not in progress<br />
#else
#if $backlogPaused then 'Paused: ' else ''#
Currently running<br />
#end if
Currently running ($backlogRunningType)<br />
#end if
<br />

<h3>Recent Search:</h3>
<a class="btn" href="$sbRoot/manage/manageSearches/forceSearch"><i class="sgicon-play"></i> Force</a>
<a id="recentsearch" class="btn#if $recentSearchStatus# disabled#end if#" href="$sbRoot/manage/manageSearches/forceSearch"><i class="sgicon-play"></i> Force</a>
#if not $recentSearchStatus
Not in progress<br />
#else

@@ -40,7 +40,7 @@
<br />

<h3>Find Propers Search:</h3>
<a class="btn" href="$sbRoot/manage/manageSearches/forceFindPropers"><i class="sgicon-play"></i> Force</a>
<a id="propersearch" class="btn#if $findPropersStatus# disabled#end if#" href="$sbRoot/manage/manageSearches/forceFindPropers"><i class="sgicon-play"></i> Force</a>
#if not $findPropersStatus
Not in progress<br />
#else

@@ -53,10 +53,85 @@
<br /><br />

<h3>Search Queue:</h3>
Backlog: <i>$queueLength['backlog'] pending items</i><br />
Recent: <i>$queueLength['recent'] pending items</i><br />
Manual: <i>$queueLength['manual'] pending items</i><br />
Failed: <i>$queueLength['failed'] pending items</i><br />
#if $queueLength['backlog'] or $queueLength['manual'] or $queueLength['failed']
<input type="button" class="show-all-more btn" id="all-btn-more" value="Expand All"><input type="button" class="show-all-less btn" id="all-btn-less" value="Collapse All"></br>
#end if
</br>
Recent: <i>$queueLength['recent'] item$sickbeard.helpers.maybe_plural($queueLength['recent'])</i></br></br>
Proper: <i>$queueLength['proper'] item$sickbeard.helpers.maybe_plural($queueLength['proper'])</i></br></br>
Backlog: <i>$len($queueLength['backlog']) item$sickbeard.helpers.maybe_plural($len($queueLength['backlog']))</i>
#if $queueLength['backlog']
<input type="button" class="shows-more btn" id="backlog-btn-more" value="Expand" #if not $queueLength['backlog']# style="display:none" #end if#><input type="button" class="shows-less btn" id="backlog-btn-less" value="Collapse" style="display:none"></br>
<table class="sickbeardTable manageTable" cellspacing="1" border="0" cellpadding="0" style="display:none">
<thead></thead>
<tbody>
#set $row = 0
#for $cur_item in $queueLength['backlog']:
#set $search_type = 'On Demand'
#if $cur_item[3]:
#if $cur_item[5]:
#set $search_type = 'Forced'
#else
#set $search_type = 'Scheduled'
#end if
#if $cur_item[4]:
#set $search_type += ' (Limited)'
#else
#set $search_type += ' (Full)'
#end if
#end if
<tr class="#echo ('odd', 'even')[$row % 2]##set $row+=1#">
<td style="width:80%;text-align:left;color:white">
<a class="whitelink" href="$sbRoot/home/displayShow?show=$cur_item[0]">$cur_item[1]</a> - $sickbeard.helpers.make_search_segment_html_string($cur_item[2])
</td>
<td style="width:20%;text-align:center;color:white">$search_type</td>
</tr>
#end for
</tbody>
</table>
#else
</br>
#end if
</br>
Manual: <i>$len($queueLength['manual']) item$sickbeard.helpers.maybe_plural($len($queueLength['manual']))</i>
#if $queueLength['manual']
<input type="button" class="shows-more btn" id="manual-btn-more" value="Expand" #if not $queueLength['manual']# style="display:none" #end if#><input type="button" class="shows-less btn" id="manual-btn-less" value="Collapse" style="display:none"></br>
<table class="sickbeardTable manageTable" cellspacing="1" border="0" cellpadding="0" style="display:none">
<thead></thead>
<tbody>
#set $row = 0
#for $cur_item in $queueLength['manual']:
<tr class="#echo ('odd', 'even')[$row % 2]##set $row+=1#">
<td style="width:100%;text-align:left;color:white">
<a class="whitelink" href="$sbRoot/home/displayShow?show=$cur_item[0]">$cur_item[1]</a> - $sickbeard.helpers.make_search_segment_html_string($cur_item[2])
</td>
</tr>
#end for
</tbody>
</table>
#else
</br>
#end if
</br>
Failed: <i>$len($queueLength['failed']) item$sickbeard.helpers.maybe_plural($len($queueLength['failed']))</i>
#if $queueLength['failed']
<input type="button" class="shows-more btn" id="failed-btn-more" value="Expand" #if not $queueLength['failed']# style="display:none" #end if#><input type="button" class="shows-less btn" id="failed-btn-less" value="Collapse" style="display:none"></br>
<table class="sickbeardTable manageTable" cellspacing="1" border="0" cellpadding="0" style="display:none">
<thead></thead>
<tbody>
#set $row = 0
#for $cur_item in $queueLength['failed']:
<tr class="#echo ('odd', 'even')[$row % 2]##set $row+=1#">
<td style="width:100%;text-align:left;color:white">
<a class="whitelink" href="$sbRoot/home/displayShow?show=$cur_item[0]">$cur_item[1]</a> - $sickbeard.helpers.make_search_segment_html_string($cur_item[2])
</td>
</tr>
#end for
</tbody>
</table>
#else
</br>
#end if
</div>
</div>
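For orientation, the Manage Searches markup above indexes a $queueLength mapping supplied by the page handler: 'recent' and 'proper' are plain counts, while 'backlog', 'manual' and 'failed' hold per-item tuples. A minimal Python sketch of data in that shape, with illustrative values only; the tuple layout is inferred from the template's $cur_item[0..5] usage rather than taken from the handler code:

    # Hypothetical example of the structure the template renders (not code from the tree).
    queue_length = {
        'recent': 2,    # plain count, rendered with maybe_plural()
        'proper': 0,    # plain count
        'backlog': [    # (show_id, show_name, segment, standard_backlog, limited_backlog, forced)
            (73739, 'Lost', [], True, True, False),   # segment is a list of episode objects in the real code
        ],
        'manual': [(248736, 'Suits', [])],
        'failed': [],
    }

    # The template derives the right-hand column from the three flags:
    item = queue_length['backlog'][0]
    search_type = 'On Demand'
    if item[3]:
        search_type = 'Forced' if item[5] else 'Scheduled'
        search_type += ' (Limited)' if item[4] else ' (Full)'
    print(search_type)  # -> 'Scheduled (Limited)'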
151  gui/slick/interfaces/default/manage_showQueueOverview.tmpl  (new file)
@@ -0,0 +1,151 @@
#import sickbeard
#from sickbeard.helpers import findCertainShow
##
#set global $title = 'Show Queue Overview'
#set global $header = 'Show Queue Overview'
#set global $sbPath = '..'
#set global $topmenu = 'manage'
##
#import os.path
#include $os.path.join($sickbeard.PROG_DIR, 'gui/slick/interfaces/default/inc_top.tmpl')

<script type="text/javascript" src="$sbRoot/js/manageShowQueueOverview.js?$sbPID" xmlns="http://www.w3.org/1999/html"></script>
<div id="content800">
#if $varExists('header')
<h1 class="header">$header</h1>
#else
<h1 class="title">$title</h1>
#end if

<div id="summary2" class="align-left">
<h3> Daily Show Update:</h3>
<a id="showupdatebutton" class="btn#if $ShowUpdateRunning# disabled#end if#" href="$sbRoot/manage/showQueueOverview/forceShowUpdate"><i class="sgicon-play"></i> Force</a>
#if not $ShowUpdateRunning:
Not in progress<br />
#else:
Currently running<br />
#end if
</br>
<h3>Show Queue:</h3>
</br>
#if $queueLength['add'] or $queueLength['update'] or $queueLength['refresh'] or $queueLength['rename'] or $queueLength['subtitle']
<input type="button" class="show-all-more btn" id="all-btn-more" value="Expand All"><input type="button" class="show-all-less btn" id="all-btn-less" value="Collapse All"></br>
#end if
</br>
Add: <i>$len($queueLength['add']) show$sickbeard.helpers.maybe_plural($len($queueLength['add']))</i>
#if $queueLength['add']
<input type="button" class="shows-more btn" id="add-btn-more" value="Expand" #if not $queueLength['add']# style="display:none" #end if#><input type="button" class="shows-less btn" id="add-btn-less" value="Collapse" style="display:none"></br>
<table class="sickbeardTable manageTable" cellspacing="1" border="0" cellpadding="0" style="display:none">
<thead></thead>
<tbody>
#set $row = 0
#for $cur_show in $queueLength['add']:
#set $show_name = str($cur_show[0])
<tr class="#echo ('odd', 'even')[$row % 2]##set $row+=1#">
<td style="width:80%;text-align:left;color:white">$show_name</td>
<td style="width:20%;text-align:center;color:white">#if $cur_show[1]#Scheduled#end if#</td>
</tr>
#end for
</tbody>
</table>
#else
</br>
#end if
</br>
Update <span class="grey-text">(Forced / Forced Web)</span>: <i>$len($queueLength['update']) <span class="grey-text">($len($queueLength['forceupdate']) / $len($queueLength['forceupdateweb']))</span> show$sickbeard.helpers.maybe_plural($len($queueLength['update']))</i>
#if $queueLength['update']
<input type="button" class="shows-more btn" id="update-btn-more" value="Expand" #if not $queueLength['update']# style="display:none" #end if#><input type="button" class="shows-less btn" id="update-btn-less" value="Collapse" style="display:none"></br>
<table class="sickbeardTable manageTable" cellspacing="1" border="0" cellpadding="0" style="display:none">
<thead></thead>
<tbody>
#set $row = 0
#for $cur_show in $queueLength['update']:
#set $show = $findCertainShow($showList, $cur_show[0])
#set $show_name = $show.name if $show else str($cur_show[0])
<tr class="#echo ('odd', 'even')[$row % 2]##set $row+=1#">
<td style="width:80%;text-align:left">
<a class="whitelink" href="$sbRoot/home/displayShow?show=$cur_show[0]">$show_name</a>
</td>
<td style="width:20%;text-align:center;color:white">#if $cur_show[1]#Scheduled, #end if#$cur_show[2]</td>
</tr>
#end for
</tbody>
</table>
#else
</br>
#end if
</br>
Refresh: <i>$len($queueLength['refresh']) show$sickbeard.helpers.maybe_plural($len($queueLength['refresh']))</i>
#if $queueLength['refresh']
<input type="button" class="shows-more btn" id="refresh-btn-more" value="Expand" #if not $queueLength['refresh']# style="display:none" #end if#><input type="button" class="shows-less btn" id="refresh-btn-less" value="Collapse" style="display:none"></br>
<table class="sickbeardTable manageTable" cellspacing="1" border="0" cellpadding="0" style="display:none">
<thead></thead>
<tbody>
#set $row = 0
#for $cur_show in $queueLength['refresh']:
#set $show = $findCertainShow($showList, $cur_show[0])
#set $show_name = $show.name if $show else str($cur_show[0])
<tr class="#echo ('odd', 'even')[$row % 2]##set $row+=1#">
<td style="width:80%;text-align:left">
<a class="whitelink" href="$sbRoot/home/displayShow?show=$cur_show[0]">$show_name</a>
</td>
<td style="width:20%;text-align:center;color:white">#if $cur_show[1]#Scheduled#end if#</td>
</tr>
#end for
</tbody>
</table>
#else
</br>
#end if
</br>
Rename: <i>$len($queueLength['rename']) show$sickbeard.helpers.maybe_plural($len($queueLength['rename']))</i>
#if $queueLength['rename']
<input type="button" class="shows-more btn" id="rename-btn-more" value="Expand" #if not $queueLength['rename']# style="display:none" #end if#><input type="button" class="shows-less btn" id="rename-btn-less" value="Collapse" style="display:none"></br>

<table class="sickbeardTable manageTable" cellspacing="1" border="0" cellpadding="0" style="display:none">
<thead></thead>
<tbody>
#set $row = 0
#for $cur_show in $queueLength['rename']:
#set $show = $findCertainShow($showList, $cur_show[0])
#set $show_name = $show.name if $show else str($cur_show[0])
<tr class="#echo ('odd', 'even')[$row % 2]##set $row+=1#">
<td style="width:80%;text-align:left">
<a class="whitelink" href="$sbRoot/home/displayShow?show=$cur_show[0]">$show_name</a>
</td>
<td style="width:20%;text-align:center;color:white">#if $cur_show[1]#Scheduled#end if#</td>
</tr>
#end for
</tbody>
</table>
#else
</br>
#end if
#if $sickbeard.USE_SUBTITLES
</br>
Subtitle: <i>$len($queueLength['subtitle']) show$sickbeard.helpers.maybe_plural($len($queueLength['subtitle']))</i>
#if $queueLength['subtitle']
<input type="button" class="shows-more btn" id="subtitle-btn-more" value="Expand" #if not $queueLength['subtitle']# style="display:none" #end if#><input type="button" class="shows-less btn" id="subtitle-btn-less" value="Collapse" style="display:none"></br>
<table class="sickbeardTable manageTable" cellspacing="1" border="0" cellpadding="0" style="display:none">
<thead></thead>
<tbody>
#set $row = 0
#for $cur_show in $queueLength['subtitle']:
#set $show = $findCertainShow($showList, $cur_show[0])
#set $show_name = $show.name if $show else str($cur_show[0])
<tr class="#echo ('odd', 'even')[$row % 2]##set $row+=1#">
<td style="width:80%;text-align:left">
<a class="whitelink" href="$sbRoot/home/displayShow?show=$cur_show[0]">$show_name</a>
</td>
<td style="width:20%;text-align:center;color:white">#if $cur_show[1]#Scheduled#end if#</td>
</tr>
#end for
</tbody>
</table>
#else
</br>
#end if
#end if
</div>
</div>
#include $os.path.join($sickbeard.PROG_DIR, 'gui/slick/interfaces/default/inc_bottom.tmpl')
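As with the search queue page, this overview reads a $queueLength mapping keyed by show-queue action. From the markup, every key holds a list of per-show tuples: index 0 is the show (an indexer id, or a plain name for items still being added), index 1 a scheduled flag, and for 'update' items index 2 a short label; 'forceupdate' and 'forceupdateweb' contribute only counts. A hypothetical Python instance in that shape (the label text is an assumption):

    # Illustrative only; field meanings are read off the template, not the handler code.
    queue_length = {
        'add': [('My New Show', False)],            # name string, show object does not exist yet
        'update': [(73739, True, 'Forced Web')],    # (indexer_id, scheduled, update-kind label)
        'forceupdate': [],
        'forceupdateweb': [(73739, True, 'Forced Web')],
        'refresh': [(248736, False)],
        'rename': [],
        'subtitle': [],
    }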
33  gui/slick/js/manageSearches.js  (new file)
@@ -0,0 +1,33 @@
$(document).ready(function() {
$('#recentsearch,#propersearch').click(function(){
$(this).addClass('disabled');
})
$('#forcebacklog,#forcefullbacklog').click(function(){
$('#forcebacklog,#forcefullbacklog').addClass('disabled');
$('#pausebacklog').removeClass('disabled');
})
$('#pausebacklog').click(function(){
$(this).addClass('disabled');
})
$('.show-all-less').click(function(){
$(this).nextAll('table').hide();
$(this).nextAll('input.shows-more').show();
$(this).nextAll('input.shows-less').hide();
})
$('.show-all-more').click(function(){
$(this).nextAll('table').show();
$(this).nextAll('input.shows-more').hide();
$(this).nextAll('input.shows-less').show();
})

$('.shows-less').click(function(){
$(this).nextAll('table:first').hide();
$(this).hide();
$(this).prevAll('input:first').show();
})
$('.shows-more').click(function(){
$(this).nextAll('table:first').show();
$(this).hide();
$(this).nextAll('input:first').show();
})
});
26  gui/slick/js/manageShowQueueOverview.js  (new file)
@@ -0,0 +1,26 @@
$(document).ready(function() {
$('#showupdatebutton').click(function(){
$(this).addClass('disabled');
})
$('.show-all-less').click(function(){
$(this).nextAll('table').hide();
$(this).nextAll('input.shows-more').show();
$(this).nextAll('input.shows-less').hide();
})
$('.show-all-more').click(function(){
$(this).nextAll('table').show();
$(this).nextAll('input.shows-more').hide();
$(this).nextAll('input.shows-less').show();
})

$('.shows-less').click(function(){
$(this).nextAll('table:first').hide();
$(this).hide();
$(this).prevAll('input:first').show();
})
$('.shows-more').click(function(){
$(this).nextAll('table:first').show();
$(this).hide();
$(this).nextAll('input:first').show();
})
});
@@ -40,12 +40,12 @@ from providers import ezrss, btn, newznab, womble, thepiratebay, torrentleech, k
freshontv, bitsoup, tokyotoshokan, animenzb, totv
from sickbeard.config import CheckSection, check_setting_int, check_setting_str, check_setting_float, ConfigMigrator, \
naming_ep_type, minimax
from sickbeard import searchBacklog, showUpdater, versionChecker, properFinder, autoPostProcesser, \
from sickbeard import searchBacklog, showUpdater, versionChecker, autoPostProcesser, \
subtitles, traktChecker
from sickbeard import helpers, db, exceptions, show_queue, search_queue, scheduler, show_name_helpers
from sickbeard import logger
from sickbeard import naming
from sickbeard import searchRecent
from sickbeard import searchRecent, searchProper
from sickbeard import scene_numbering, scene_exceptions, name_cache
from indexers.indexer_api import indexerApi
from indexers.indexer_exceptions import indexer_shownotfound, indexer_exception, indexer_error, indexer_episodenotfound, \

@@ -1158,34 +1158,35 @@ def initialize(consoleLogging=True):
update_now = datetime.timedelta(minutes=0)
versionCheckScheduler = scheduler.Scheduler(versionChecker.CheckVersion(),
cycleTime=datetime.timedelta(hours=UPDATE_FREQUENCY),
threadName="CHECKVERSION",
threadName='CHECKVERSION',
silent=False)

showQueueScheduler = scheduler.Scheduler(show_queue.ShowQueue(),
cycleTime=datetime.timedelta(seconds=3),
threadName="SHOWQUEUE")
threadName='SHOWQUEUE')

showUpdateScheduler = scheduler.Scheduler(showUpdater.ShowUpdater(),
cycleTime=datetime.timedelta(hours=1),
threadName="SHOWUPDATER",
start_time=datetime.time(hour=SHOW_UPDATE_HOUR)) # 3 AM
threadName='SHOWUPDATER',
start_time=datetime.time(hour=SHOW_UPDATE_HOUR),
prevent_cycle_run=sickbeard.showQueueScheduler.action.isShowUpdateRunning) # 3 AM

# searchers
searchQueueScheduler = scheduler.Scheduler(search_queue.SearchQueue(),
cycleTime=datetime.timedelta(seconds=3),
threadName="SEARCHQUEUE")
threadName='SEARCHQUEUE')

update_interval = datetime.timedelta(minutes=RECENTSEARCH_FREQUENCY)
recentSearchScheduler = scheduler.Scheduler(searchRecent.RecentSearcher(),
cycleTime=update_interval,
threadName="RECENTSEARCHER",
threadName='RECENTSEARCHER',
run_delay=update_now if RECENTSEARCH_STARTUP
else datetime.timedelta(minutes=5),
prevent_cycle_run=sickbeard.searchQueueScheduler.action.is_recentsearch_in_progress)

backlogSearchScheduler = searchBacklog.BacklogSearchScheduler(searchBacklog.BacklogSearcher(),
cycleTime=datetime.timedelta(minutes=get_backlog_cycle_time()),
threadName="BACKLOG",
threadName='BACKLOG',
run_delay=update_now if BACKLOG_STARTUP
else datetime.timedelta(minutes=10),
prevent_cycle_run=sickbeard.searchQueueScheduler.action.is_standard_backlog_in_progress)

@@ -1198,27 +1199,28 @@ def initialize(consoleLogging=True):
update_interval = datetime.timedelta(hours=1)
run_at = datetime.time(hour=1) # 1 AM

properFinderScheduler = scheduler.Scheduler(properFinder.ProperFinder(),
properFinderScheduler = scheduler.Scheduler(searchProper.ProperSearcher(),
cycleTime=update_interval,
threadName="FINDPROPERS",
threadName='FINDPROPERS',
start_time=run_at,
run_delay=update_interval)
run_delay=update_interval,
prevent_cycle_run=sickbeard.searchQueueScheduler.action.is_propersearch_in_progress)

# processors
autoPostProcesserScheduler = scheduler.Scheduler(autoPostProcesser.PostProcesser(),
cycleTime=datetime.timedelta(
minutes=AUTOPOSTPROCESSER_FREQUENCY),
threadName="POSTPROCESSER",
minutes=AUTOPOSTPROCESSER_FREQUENCY),
threadName='POSTPROCESSER',
silent=not PROCESS_AUTOMATICALLY)

traktCheckerScheduler = scheduler.Scheduler(traktChecker.TraktChecker(),
cycleTime=datetime.timedelta(hours=1),
threadName="TRAKTCHECKER",
threadName='TRAKTCHECKER',
silent=not USE_TRAKT)

subtitlesFinderScheduler = scheduler.Scheduler(subtitles.SubtitlesFinder(),
cycleTime=datetime.timedelta(hours=SUBTITLES_FINDER_FREQUENCY),
threadName="FINDSUBTITLES",
threadName='FINDSUBTITLES',
silent=not USE_SUBTITLES)

showList = []
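The initialize() hunk above introduces the prevent_cycle_run keyword, coupling several schedulers to a callable that reports whether related queue work is still running. Gathered in one place for reference (thread names and callables exactly as in the hunk; this summary block itself is not code from the tree):

    # threadName -> callable consulted before each cycle; a truthy result skips the cycle
    prevent_cycle_hooks = {
        'SHOWUPDATER': sickbeard.showQueueScheduler.action.isShowUpdateRunning,
        'RECENTSEARCHER': sickbeard.searchQueueScheduler.action.is_recentsearch_in_progress,
        'BACKLOG': sickbeard.searchQueueScheduler.action.is_standard_backlog_in_progress,
        'FINDPROPERS': sickbeard.searchQueueScheduler.action.is_propersearch_in_progress,
    }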
@@ -35,69 +35,73 @@ class GenericQueue(object):

self.queue = []

self.queue_name = "QUEUE"
self.queue_name = 'QUEUE'

self.min_priority = 0

self.lock = threading.Lock()

def pause(self):
logger.log(u"Pausing queue")
self.min_priority = 999999999999
logger.log(u'Pausing queue')
if self.lock:
self.min_priority = 999999999999

def unpause(self):
logger.log(u"Unpausing queue")
self.min_priority = 0
logger.log(u'Unpausing queue')
with self.lock:
self.min_priority = 0

def add_item(self, item):
item.added = datetime.datetime.now()
self.queue.append(item)
with self.lock:
item.added = datetime.datetime.now()
self.queue.append(item)

return item
return item

def run(self, force=False):

# only start a new task if one isn't already going
if self.currentItem is None or not self.currentItem.isAlive():
with self.lock:
if self.currentItem is None or not self.currentItem.isAlive():

# if the thread is dead then the current item should be finished
if self.currentItem:
self.currentItem.finish()
self.currentItem = None
# if the thread is dead then the current item should be finished
if self.currentItem:
self.currentItem.finish()
self.currentItem = None

# if there's something in the queue then run it in a thread and take it out of the queue
if len(self.queue) > 0:
# if there's something in the queue then run it in a thread and take it out of the queue
if len(self.queue) > 0:

# sort by priority
def sorter(x, y):
"""
Sorts by priority descending then time ascending
"""
if x.priority == y.priority:
if y.added == x.added:
return 0
elif y.added < x.added:
return 1
elif y.added > x.added:
return -1
else:
return y.priority - x.priority
# sort by priority
def sorter(x, y):
"""
Sorts by priority descending then time ascending
"""
if x.priority == y.priority:
if y.added == x.added:
return 0
elif y.added < x.added:
return 1
elif y.added > x.added:
return -1
else:
return y.priority - x.priority

self.queue.sort(cmp=sorter)
if self.queue[0].priority < self.min_priority:
return
self.queue.sort(cmp=sorter)
if self.queue[0].priority < self.min_priority:
return

# launch the queue item in a thread
self.currentItem = self.queue.pop(0)
if not self.queue_name == 'SEARCHQUEUE':
self.currentItem.name = self.queue_name + '-' + self.currentItem.name
self.currentItem.start()
# launch the queue item in a thread
self.currentItem = self.queue.pop(0)
if not self.queue_name == 'SEARCHQUEUE':
self.currentItem.name = self.queue_name + '-' + self.currentItem.name
self.currentItem.start()

class QueueItem(threading.Thread):
def __init__(self, name, action_id=0):
super(QueueItem, self).__init__()

self.name = name.replace(" ", "-").upper()
self.name = name.replace(' ', '-').upper()
self.inProgress = False
self.priority = QueuePriorities.NORMAL
self.action_id = action_id
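The net effect of the GenericQueue hunk is that every queue mutation now happens under the instance lock instead of relying on interpreter-level luck. A stripped-down sketch of that pattern (names simplified, not the full class):

    import datetime
    import threading

    class TinyQueue(object):
        # Condensed illustration of the locking added to GenericQueue.
        def __init__(self):
            self.queue = []
            self.min_priority = 0
            self.lock = threading.Lock()

        def add_item(self, item):
            with self.lock:                 # timestamp and append atomically
                item.added = datetime.datetime.now()
                self.queue.append(item)
            return item

        def unpause(self):
            with self.lock:                 # the priority gate is also adjusted under the lock
                self.min_priority = 0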
@@ -1,4 +1,4 @@
# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickGear.

@@ -1412,3 +1412,21 @@ def clear_unused_providers():
if providers:
myDB = db.DBConnection('cache.db')
myDB.action('DELETE FROM provider_cache WHERE provider NOT IN (%s)' % ','.join(['?'] * len(providers)), providers)

def make_search_segment_html_string(segment, max_eps=5):
seg_str = ''
if segment and not isinstance(segment, list):
segment = [segment]
if segment and len(segment) > max_eps:
seasons = [x for x in set([x.season for x in segment])]
seg_str = u'Season' + maybe_plural(len(seasons)) + ': '
first_run = True
for x in seasons:
eps = [str(s.episode) for s in segment if s.season == x]
ep_c = len(eps)
seg_str += ('' if first_run else ' ,') + str(x) + ' <span title="Episode' + maybe_plural(ep_c) + ': ' + ', '.join(eps) + '">(' + str(ep_c) + ' Ep' + maybe_plural(ep_c) + ')</span>'
first_run = False
elif segment:
episodes = ['S' + str(x.season).zfill(2) + 'E' + str(x.episode).zfill(2) for x in segment]
seg_str = u'Episode' + maybe_plural(len(episodes)) + ': ' + ', '.join(episodes)
return seg_str
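make_search_segment_html_string() is what the new queue pages call to summarise a search segment. A quick usage sketch with a minimal episode stand-in; it assumes maybe_plural() returns 's' for counts other than one:

    from collections import namedtuple

    Ep = namedtuple('Ep', 'season episode')   # stand-in: the helper only reads .season and .episode

    print(make_search_segment_html_string([Ep(1, 1), Ep(1, 2)]))
    # -> u'Episodes: S01E01, S01E02'

    print(make_search_segment_html_string([Ep(1, e) for e in range(1, 8)]))
    # more than max_eps episodes collapse to a per-season count with a hover title:
    # -> u'Season: 1 <span title="Episodes: 1, 2, 3, 4, 5, 6, 7">(7 Eps)</span>'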
@@ -35,255 +35,247 @@ from sickbeard.common import DOWNLOADED, SNATCHED, SNATCHED_PROPER, Quality
from name_parser.parser import NameParser, InvalidNameException, InvalidShowException

class ProperFinder():
def __init__(self):
self.amActive = False
def searchPropers():

def run(self):
if not sickbeard.DOWNLOAD_PROPERS:
return

if not sickbeard.DOWNLOAD_PROPERS:
return
logger.log(u'Beginning the search for new propers')

logger.log(u"Beginning the search for new propers")
propers = _getProperList()

self.amActive = True
if propers:
_downloadPropers(propers)

propers = self._getProperList()
_set_lastProperSearch(datetime.datetime.today().toordinal())

if propers:
self._downloadPropers(propers)
run_at = ''
if None is sickbeard.properFinderScheduler.start_time:
run_in = sickbeard.properFinderScheduler.lastRun + sickbeard.properFinderScheduler.cycleTime - datetime.datetime.now()
hours, remainder = divmod(run_in.seconds, 3600)
minutes, seconds = divmod(remainder, 60)
run_at = u', next check in approx. ' + (
'%dh, %dm' % (hours, minutes) if 0 < hours else '%dm, %ds' % (minutes, seconds))

self._set_lastProperSearch(datetime.datetime.today().toordinal())
logger.log(u'Completed the search for new propers%s' % run_at)

run_at = ""
if None is sickbeard.properFinderScheduler.start_time:
run_in = sickbeard.properFinderScheduler.lastRun + sickbeard.properFinderScheduler.cycleTime - datetime.datetime.now()
hours, remainder = divmod(run_in.seconds, 3600)
minutes, seconds = divmod(remainder, 60)
run_at = u", next check in approx. " + (
"%dh, %dm" % (hours, minutes) if 0 < hours else "%dm, %ds" % (minutes, seconds))
def _getProperList():
propers = {}

logger.log(u"Completed the search for new propers%s" % run_at)
search_date = datetime.datetime.today() - datetime.timedelta(days=2)

self.amActive = False
# for each provider get a list of the
origThreadName = threading.currentThread().name
providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive()]
for curProvider in providers:
threading.currentThread().name = origThreadName + ' :: [' + curProvider.name + ']'

def _getProperList(self):
propers = {}
logger.log(u'Searching for any new PROPER releases from ' + curProvider.name)

search_date = datetime.datetime.today() - datetime.timedelta(days=2)
try:
curPropers = curProvider.findPropers(search_date)
except exceptions.AuthException, e:
logger.log(u'Authentication error: ' + ex(e), logger.ERROR)
continue
except Exception, e:
logger.log(u'Error while searching ' + curProvider.name + ', skipping: ' + ex(e), logger.ERROR)
logger.log(traceback.format_exc(), logger.DEBUG)
continue
finally:
threading.currentThread().name = origThreadName

# for each provider get a list of the
origThreadName = threading.currentThread().name
providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive()]
for curProvider in providers:
threading.currentThread().name = origThreadName + " :: [" + curProvider.name + "]"
# if they haven't been added by a different provider than add the proper to the list
for x in curPropers:
name = _genericName(x.name)
if not name in propers:
logger.log(u'Found new proper: ' + x.name, logger.DEBUG)
x.provider = curProvider
propers[name] = x

logger.log(u"Searching for any new PROPER releases from " + curProvider.name)
# take the list of unique propers and get it sorted by
sortedPropers = sorted(propers.values(), key=operator.attrgetter('date'), reverse=True)
finalPropers = []

try:
curPropers = curProvider.findPropers(search_date)
except exceptions.AuthException, e:
logger.log(u"Authentication error: " + ex(e), logger.ERROR)
continue
except Exception, e:
logger.log(u"Error while searching " + curProvider.name + ", skipping: " + ex(e), logger.ERROR)
logger.log(traceback.format_exc(), logger.DEBUG)
continue
finally:
threading.currentThread().name = origThreadName
for curProper in sortedPropers:

# if they haven't been added by a different provider than add the proper to the list
for x in curPropers:
name = self._genericName(x.name)
if not name in propers:
logger.log(u"Found new proper: " + x.name, logger.DEBUG)
x.provider = curProvider
propers[name] = x
try:
myParser = NameParser(False)
parse_result = myParser.parse(curProper.name)
except InvalidNameException:
logger.log(u'Unable to parse the filename ' + curProper.name + ' into a valid episode', logger.DEBUG)
continue
except InvalidShowException:
logger.log(u'Unable to parse the filename ' + curProper.name + ' into a valid show', logger.DEBUG)
continue

# take the list of unique propers and get it sorted by
sortedPropers = sorted(propers.values(), key=operator.attrgetter('date'), reverse=True)
finalPropers = []

for curProper in sortedPropers:

try:
myParser = NameParser(False)
parse_result = myParser.parse(curProper.name)
except InvalidNameException:
logger.log(u"Unable to parse the filename " + curProper.name + " into a valid episode", logger.DEBUG)
continue
except InvalidShowException:
logger.log(u"Unable to parse the filename " + curProper.name + " into a valid show", logger.DEBUG)
continue

if not parse_result.series_name:
continue

if not parse_result.episode_numbers:
logger.log(
u"Ignoring " + curProper.name + " because it's for a full season rather than specific episode",
logger.DEBUG)
continue
if not parse_result.series_name:
continue

if not parse_result.episode_numbers:
logger.log(
u"Successful match! Result " + parse_result.original_name + " matched to show " + parse_result.show.name,
u'Ignoring ' + curProper.name + ' because it\'s for a full season rather than specific episode',
logger.DEBUG)
continue

# set the indexerid in the db to the show's indexerid
curProper.indexerid = parse_result.show.indexerid
logger.log(
u'Successful match! Result ' + parse_result.original_name + ' matched to show ' + parse_result.show.name,
logger.DEBUG)

# set the indexer in the db to the show's indexer
curProper.indexer = parse_result.show.indexer
# set the indexerid in the db to the show's indexerid
curProper.indexerid = parse_result.show.indexerid

# populate our Proper instance
curProper.season = parse_result.season_number if parse_result.season_number != None else 1
curProper.episode = parse_result.episode_numbers[0]
curProper.release_group = parse_result.release_group
curProper.version = parse_result.version
curProper.quality = Quality.nameQuality(curProper.name, parse_result.is_anime)
# set the indexer in the db to the show's indexer
curProper.indexer = parse_result.show.indexer

# only get anime proper if it has release group and version
if parse_result.is_anime:
if not curProper.release_group and curProper.version == -1:
logger.log(u"Proper " + curProper.name + " doesn't have a release group and version, ignoring it",
logger.DEBUG)
continue
# populate our Proper instance
curProper.season = parse_result.season_number if parse_result.season_number != None else 1
curProper.episode = parse_result.episode_numbers[0]
curProper.release_group = parse_result.release_group
curProper.version = parse_result.version
curProper.quality = Quality.nameQuality(curProper.name, parse_result.is_anime)

if not show_name_helpers.filterBadReleases(curProper.name, parse=False):
logger.log(u"Proper " + curProper.name + " isn't a valid scene release that we want, ignoring it",
# only get anime proper if it has release group and version
if parse_result.is_anime:
if not curProper.release_group and curProper.version == -1:
logger.log(u'Proper ' + curProper.name + ' doesn\'t have a release group and version, ignoring it',
logger.DEBUG)
continue

if parse_result.show.rls_ignore_words and search.filter_release_name(curProper.name,
parse_result.show.rls_ignore_words):
logger.log(
u"Ignoring " + curProper.name + " based on ignored words filter: " + parse_result.show.rls_ignore_words,
logger.MESSAGE)
continue
if not show_name_helpers.filterBadReleases(curProper.name, parse=False):
logger.log(u'Proper ' + curProper.name + ' isn\'t a valid scene release that we want, ignoring it',
logger.DEBUG)
continue

if parse_result.show.rls_require_words and not search.filter_release_name(curProper.name,
parse_result.show.rls_require_words):
logger.log(
u"Ignoring " + curProper.name + " based on required words filter: " + parse_result.show.rls_require_words,
logger.MESSAGE)
continue
if parse_result.show.rls_ignore_words and search.filter_release_name(curProper.name,
parse_result.show.rls_ignore_words):
logger.log(
u'Ignoring ' + curProper.name + ' based on ignored words filter: ' + parse_result.show.rls_ignore_words,
logger.MESSAGE)
continue

# check if we actually want this proper (if it's the right quality)
if parse_result.show.rls_require_words and not search.filter_release_name(curProper.name,
parse_result.show.rls_require_words):
logger.log(
u'Ignoring ' + curProper.name + ' based on required words filter: ' + parse_result.show.rls_require_words,
logger.MESSAGE)
continue

# check if we actually want this proper (if it's the right quality)
myDB = db.DBConnection()
sqlResults = myDB.select('SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?',
[curProper.indexerid, curProper.season, curProper.episode])
if not sqlResults:
continue

# only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones)
oldStatus, oldQuality = Quality.splitCompositeStatus(int(sqlResults[0]['status']))
if oldStatus not in (DOWNLOADED, SNATCHED) or oldQuality != curProper.quality:
continue

# check if we actually want this proper (if it's the right release group and a higher version)
if parse_result.is_anime:
myDB = db.DBConnection()
sqlResults = myDB.select("SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?",
[curProper.indexerid, curProper.season, curProper.episode])
if not sqlResults:
continue
sqlResults = myDB.select(
'SELECT release_group, version FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?',
[curProper.indexerid, curProper.season, curProper.episode])

# only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones)
oldStatus, oldQuality = Quality.splitCompositeStatus(int(sqlResults[0]["status"]))
if oldStatus not in (DOWNLOADED, SNATCHED) or oldQuality != curProper.quality:
continue

# check if we actually want this proper (if it's the right release group and a higher version)
if parse_result.is_anime:
myDB = db.DBConnection()
sqlResults = myDB.select(
"SELECT release_group, version FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?",
[curProper.indexerid, curProper.season, curProper.episode])

oldVersion = int(sqlResults[0]["version"])
oldRelease_group = (sqlResults[0]["release_group"])

if oldVersion > -1 and oldVersion < curProper.version:
logger.log("Found new anime v" + str(curProper.version) + " to replace existing v" + str(oldVersion))
else:
continue

if oldRelease_group != curProper.release_group:
logger.log("Skipping proper from release group: " + curProper.release_group + ", does not match existing release group: " + oldRelease_group)
continue

# if the show is in our list and there hasn't been a proper already added for that particular episode then add it to our list of propers
if curProper.indexerid != -1 and (curProper.indexerid, curProper.season, curProper.episode) not in map(
operator.attrgetter('indexerid', 'season', 'episode'), finalPropers):
logger.log(u"Found a proper that we need: " + str(curProper.name))
finalPropers.append(curProper)

return finalPropers

def _downloadPropers(self, properList):

for curProper in properList:

historyLimit = datetime.datetime.today() - datetime.timedelta(days=30)

# make sure the episode has been downloaded before
myDB = db.DBConnection()
historyResults = myDB.select(
"SELECT resource FROM history "
"WHERE showid = ? AND season = ? AND episode = ? AND quality = ? AND date >= ? "
"AND action IN (" + ",".join([str(x) for x in Quality.SNATCHED]) + ")",
[curProper.indexerid, curProper.season, curProper.episode, curProper.quality,
historyLimit.strftime(history.dateFormat)])

# if we didn't download this episode in the first place we don't know what quality to use for the proper so we can't do it
if len(historyResults) == 0:
logger.log(
u"Unable to find an original history entry for proper " + curProper.name + " so I'm not downloading it.")
continue
oldVersion = int(sqlResults[0]['version'])
oldRelease_group = (sqlResults[0]['release_group'])

if oldVersion > -1 and oldVersion < curProper.version:
logger.log('Found new anime v' + str(curProper.version) + ' to replace existing v' + str(oldVersion))
else:
continue

# make sure that none of the existing history downloads are the same proper we're trying to download
clean_proper_name = self._genericName(helpers.remove_non_release_groups(curProper.name))
isSame = False
for curResult in historyResults:
# if the result exists in history already we need to skip it
if self._genericName(helpers.remove_non_release_groups(curResult["resource"])) == clean_proper_name:
isSame = True
break
if isSame:
logger.log(u"This proper is already in history, skipping it", logger.DEBUG)
continue
if oldRelease_group != curProper.release_group:
logger.log('Skipping proper from release group: ' + curProper.release_group + ', does not match existing release group: ' + oldRelease_group)
continue

# get the episode object
showObj = helpers.findCertainShow(sickbeard.showList, curProper.indexerid)
if showObj == None:
logger.log(u"Unable to find the show with indexerid " + str(
curProper.indexerid) + " so unable to download the proper", logger.ERROR)
continue
epObj = showObj.getEpisode(curProper.season, curProper.episode)
# if the show is in our list and there hasn't been a proper already added for that particular episode then add it to our list of propers
if curProper.indexerid != -1 and (curProper.indexerid, curProper.season, curProper.episode) not in map(
operator.attrgetter('indexerid', 'season', 'episode'), finalPropers):
logger.log(u'Found a proper that we need: ' + str(curProper.name))
finalPropers.append(curProper)

# make the result object
result = curProper.provider.getResult([epObj])
result.url = curProper.url
result.name = curProper.name
result.quality = curProper.quality
result.version = curProper.version
return finalPropers

# snatch it
search.snatchEpisode(result, SNATCHED_PROPER)
def _downloadPropers(properList):

def _genericName(self, name):
return name.replace(".", " ").replace("-", " ").replace("_", " ").lower()
for curProper in properList:

def _set_lastProperSearch(self, when):

logger.log(u"Setting the last Proper search in the DB to " + str(when), logger.DEBUG)
historyLimit = datetime.datetime.today() - datetime.timedelta(days=30)

# make sure the episode has been downloaded before
myDB = db.DBConnection()
sqlResults = myDB.select("SELECT * FROM info")
historyResults = myDB.select(
'SELECT resource FROM history '
'WHERE showid = ? AND season = ? AND episode = ? AND quality = ? AND date >= ? '
'AND action IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')',
[curProper.indexerid, curProper.season, curProper.episode, curProper.quality,
historyLimit.strftime(history.dateFormat)])

# if we didn't download this episode in the first place we don't know what quality to use for the proper so we can't do it
if len(historyResults) == 0:
logger.log(
u'Unable to find an original history entry for proper ' + curProper.name + ' so I\'m not downloading it.')
continue

if len(sqlResults) == 0:
myDB.action("INSERT INTO info (last_backlog, last_indexer, last_proper_search) VALUES (?,?,?)",
[0, 0, str(when)])
else:
myDB.action("UPDATE info SET last_proper_search=" + str(when))

def _get_lastProperSearch(self):
# make sure that none of the existing history downloads are the same proper we're trying to download
clean_proper_name = _genericName(helpers.remove_non_release_groups(curProper.name))
isSame = False
for curResult in historyResults:
# if the result exists in history already we need to skip it
if _genericName(helpers.remove_non_release_groups(curResult['resource'])) == clean_proper_name:
isSame = True
break
if isSame:
logger.log(u'This proper is already in history, skipping it', logger.DEBUG)
continue

myDB = db.DBConnection()
sqlResults = myDB.select("SELECT * FROM info")
# get the episode object
showObj = helpers.findCertainShow(sickbeard.showList, curProper.indexerid)
if showObj == None:
logger.log(u'Unable to find the show with indexerid ' + str(
curProper.indexerid) + ' so unable to download the proper', logger.ERROR)
continue
epObj = showObj.getEpisode(curProper.season, curProper.episode)

try:
last_proper_search = datetime.date.fromordinal(int(sqlResults[0]["last_proper_search"]))
except:
return datetime.date.fromordinal(1)
# make the result object
result = curProper.provider.getResult([epObj])
result.url = curProper.url
result.name = curProper.name
result.quality = curProper.quality
result.version = curProper.version

return last_proper_search
# snatch it
search.snatchEpisode(result, SNATCHED_PROPER)

def _genericName(name):
return name.replace('.', ' ').replace('-', ' ').replace('_', ' ').lower()

def _set_lastProperSearch(when):

logger.log(u'Setting the last Proper search in the DB to ' + str(when), logger.DEBUG)

myDB = db.DBConnection()
sqlResults = myDB.select('SELECT * FROM info')

if len(sqlResults) == 0:
myDB.action('INSERT INTO info (last_backlog, last_indexer, last_proper_search) VALUES (?,?,?)',
[0, 0, str(when)])
else:
myDB.action('UPDATE info SET last_proper_search=' + str(when))

def _get_lastProperSearch():

myDB = db.DBConnection()
sqlResults = myDB.select('SELECT * FROM info')

try:
last_proper_search = datetime.date.fromordinal(int(sqlResults[0]['last_proper_search']))
except:
return datetime.date.fromordinal(1)

return last_proper_search
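The bookkeeping helpers at the end of the new module-level layout can be exercised on their own. A small sketch; it assumes a reachable SickGear info table and uses an illustrative release name:

    import datetime

    when = datetime.datetime.today().toordinal()
    _set_lastProperSearch(when)       # INSERTs or UPDATEs the single row in the info table
    assert _get_lastProperSearch() == datetime.date.fromordinal(when)

    # _genericName() normalises release names before the duplicate-history check:
    print(_genericName('Show.Name.S01E01.PROPER-GROUP'))
    # -> 'show name s01e01 proper group'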
@@ -46,7 +46,6 @@ class Scheduler(threading.Thread):

def forceRun(self):
if not self.action.amActive:
self.lastRun = datetime.datetime.fromordinal(1)
self.force = True
return True
return False

@@ -55,42 +54,47 @@ class Scheduler(threading.Thread):

while not self.stop.is_set():

current_time = datetime.datetime.now()
should_run = False

# check if interval has passed
if current_time - self.lastRun >= self.cycleTime:
# check if wanting to start around certain time taking interval into account
if self.start_time:
hour_diff = current_time.time().hour - self.start_time.hour
if not hour_diff < 0 and hour_diff < self.cycleTime.seconds / 3600:
should_run = True
else:
# set lastRun to only check start_time after another cycleTime
self.lastRun = current_time
else:
should_run = True

if should_run and self.prevent_cycle_run is not None and self.prevent_cycle_run():
logger.log(u'%s skipping this cycleTime' % self.name, logger.WARNING)
# set lastRun to only check start_time after another cycleTime
self.lastRun = current_time
try:
current_time = datetime.datetime.now()
should_run = False

if should_run:
self.lastRun = current_time
# check if interval has passed
if current_time - self.lastRun >= self.cycleTime:
# check if wanting to start around certain time taking interval into account
if self.start_time:
hour_diff = current_time.time().hour - self.start_time.hour
if not hour_diff < 0 and hour_diff < self.cycleTime.seconds / 3600:
should_run = True
else:
# set lastRun to only check start_time after another cycleTime
self.lastRun = current_time
else:
should_run = True

try:
if not self.silent:
logger.log(u"Starting new thread: " + self.name, logger.DEBUG)
if self.force:
should_run = True

self.action.run()
except Exception, e:
logger.log(u"Exception generated in thread " + self.name + ": " + ex(e), logger.ERROR)
logger.log(repr(traceback.format_exc()), logger.DEBUG)
if should_run and self.prevent_cycle_run is not None and self.prevent_cycle_run():
logger.log(u'%s skipping this cycleTime' % self.name, logger.WARNING)
# set lastRun to only check start_time after another cycleTime
self.lastRun = current_time
should_run = False

if self.force:
self.force = False
if should_run:
self.lastRun = current_time

try:
if not self.silent:
logger.log(u"Starting new thread: " + self.name, logger.DEBUG)

self.action.run()
except Exception, e:
logger.log(u"Exception generated in thread " + self.name + ": " + ex(e), logger.ERROR)
logger.log(repr(traceback.format_exc()), logger.DEBUG)

finally:
if self.force:
self.force = False

time.sleep(1)
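Putting the reworked loop together: each pass checks the interval, the optional start_time window, the new prevent_cycle_run veto, and any pending force request, and clears the force flag once a run completes. A minimal stand-alone sketch of how such gating can be combined; the exact ordering of the force and veto checks here is simplified and may differ from the real class, which also handles silent logging and stop events:

    import datetime
    import threading
    import time

    class MiniScheduler(threading.Thread):
        # Reduced model of the scheduler gating logic only; not the SickGear class itself.
        def __init__(self, action, cycle_time, prevent_cycle_run=None):
            super(MiniScheduler, self).__init__()
            self.action = action
            self.cycleTime = cycle_time
            self.prevent_cycle_run = prevent_cycle_run
            self.lastRun = datetime.datetime.fromordinal(1)
            self.force = False
            self.stop = threading.Event()

        def run(self):
            while not self.stop.is_set():
                now = datetime.datetime.now()
                should_run = now - self.lastRun >= self.cycleTime
                if should_run and self.prevent_cycle_run is not None and self.prevent_cycle_run():
                    self.lastRun = now          # related work is running: wait another full cycle
                    should_run = False
                if self.force:                  # a forced run bypasses the interval check
                    should_run = True
                if should_run:
                    self.lastRun = now
                    try:
                        self.action.run()
                    finally:
                        self.force = False      # always clear a pending force request
                time.sleep(1)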
@@ -30,11 +30,14 @@ from sickbeard import ui
from sickbeard import common
from sickbeard.search import wantedEpisodes

NORMAL_BACKLOG = 0
LIMITED_BACKLOG = 10
FULL_BACKLOG = 20

class BacklogSearchScheduler(scheduler.Scheduler):
def forceSearch(self):
self.action._set_lastBacklog(1)
self.lastRun = datetime.datetime.fromordinal(1)
def forceSearch(self, force_type=NORMAL_BACKLOG):
self.force = True
self.action.forcetype = force_type

def nextRun(self):
if self.action._lastBacklog <= 1:

@@ -54,6 +57,7 @@ class BacklogSearcher:
self.amActive = False
self.amPaused = False
self.amWaiting = False
self.forcetype = NORMAL_BACKLOG

self._resetPI()

@@ -68,13 +72,13 @@ class BacklogSearcher:
return None

def am_running(self):
logger.log(u"amWaiting: " + str(self.amWaiting) + ", amActive: " + str(self.amActive), logger.DEBUG)
logger.log(u'amWaiting: ' + str(self.amWaiting) + ', amActive: ' + str(self.amActive), logger.DEBUG)
return (not self.amWaiting) and self.amActive

def searchBacklog(self, which_shows=None):
def searchBacklog(self, which_shows=None, force_type=NORMAL_BACKLOG):

if self.amActive:
logger.log(u"Backlog is still running, not starting it again", logger.DEBUG)
logger.log(u'Backlog is still running, not starting it again', logger.DEBUG)
return

if which_shows:

@@ -89,9 +93,15 @@ class BacklogSearcher:
curDate = datetime.date.today().toordinal()
fromDate = datetime.date.fromordinal(1)

if not which_shows and not curDate - self._lastBacklog >= self.cycleTime:
limited_backlog = False
if (not which_shows and force_type == LIMITED_BACKLOG) or (not which_shows and force_type != FULL_BACKLOG and not curDate - self._lastBacklog >= self.cycleTime):
logger.log(u'Running limited backlog for episodes missed during the last %s day(s)' % str(sickbeard.BACKLOG_DAYS))
fromDate = datetime.date.today() - datetime.timedelta(days=sickbeard.BACKLOG_DAYS)
limited_backlog = True

forced = False
if not which_shows and force_type != NORMAL_BACKLOG:
forced = True

self.amActive = True
self.amPaused = False

@@ -105,9 +115,9 @@ class BacklogSearcher:
segments = wantedEpisodes(curShow, fromDate, make_dict=True)

for season, segment in segments.items():
self.currentSearchInfo = {'title': curShow.name + " Season " + str(season)}
self.currentSearchInfo = {'title': curShow.name + ' Season ' + str(season)}

backlog_queue_item = search_queue.BacklogQueueItem(curShow, segment, standard_backlog=standard_backlog)
backlog_queue_item = search_queue.BacklogQueueItem(curShow, segment, standard_backlog=standard_backlog, limited_backlog=limited_backlog, forced=forced)
sickbeard.searchQueueScheduler.action.add_item(backlog_queue_item) # @UndefinedVariable
else:
logger.log(u'Nothing needs to be downloaded for %s, skipping' % str(curShow.name), logger.DEBUG)

@@ -122,17 +132,17 @@ class BacklogSearcher:

def _get_lastBacklog(self):

logger.log(u"Retrieving the last check time from the DB", logger.DEBUG)
logger.log(u'Retrieving the last check time from the DB', logger.DEBUG)

myDB = db.DBConnection()
sqlResults = myDB.select("SELECT * FROM info")
sqlResults = myDB.select('SELECT * FROM info')

if len(sqlResults) == 0:
lastBacklog = 1
elif sqlResults[0]["last_backlog"] == None or sqlResults[0]["last_backlog"] == "":
elif sqlResults[0]['last_backlog'] == None or sqlResults[0]['last_backlog'] == '':
lastBacklog = 1
else:
lastBacklog = int(sqlResults[0]["last_backlog"])
lastBacklog = int(sqlResults[0]['last_backlog'])
if lastBacklog > datetime.date.today().toordinal():
lastBacklog = 1

@@ -141,19 +151,21 @@ class BacklogSearcher:

def _set_lastBacklog(self, when):

logger.log(u"Setting the last backlog in the DB to " + str(when), logger.DEBUG)
logger.log(u'Setting the last backlog in the DB to ' + str(when), logger.DEBUG)

myDB = db.DBConnection()
sqlResults = myDB.select("SELECT * FROM info")
sqlResults = myDB.select('SELECT * FROM info')

if len(sqlResults) == 0:
myDB.action("INSERT INTO info (last_backlog, last_indexer) VALUES (?,?)", [str(when), 0])
myDB.action('INSERT INTO info (last_backlog, last_indexer) VALUES (?,?)', [str(when), 0])
else:
myDB.action("UPDATE info SET last_backlog=" + str(when))
myDB.action('UPDATE info SET last_backlog=' + str(when))

def run(self):
try:
self.searchBacklog()
force_type = self.forcetype
self.forcetype = NORMAL_BACKLOG
self.searchBacklog(force_type=force_type)
except:
self.amActive = False
raise
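The limited/full split comes down to which fromDate the backlog searches back to. A minimal sketch of that decision, reusing the NORMAL/LIMITED/FULL constants above; the BACKLOG_DAYS value and 21-day cycle here are assumed for illustration:

import datetime

NORMAL_BACKLOG, LIMITED_BACKLOG, FULL_BACKLOG = 0, 10, 20
BACKLOG_DAYS = 7  # assumed setting


def backlog_from_date(force_type=NORMAL_BACKLOG, last_backlog_ordinal=1, cycle_days=21):
    """Return the date a backlog pass should search back to."""
    today = datetime.date.today()
    cycle_due = today.toordinal() - last_backlog_ordinal >= cycle_days
    # limited: only the last BACKLOG_DAYS days, either forced or because a full cycle is not due yet
    if force_type == LIMITED_BACKLOG or (force_type != FULL_BACKLOG and not cycle_due):
        return today - datetime.timedelta(days=BACKLOG_DAYS)
    # full: search the whole history
    return datetime.date.fromordinal(1)


print(backlog_from_date(LIMITED_BACKLOG))
print(backlog_from_date(FULL_BACKLOG))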
38 sickbeard/searchProper.py Normal file

@@ -0,0 +1,38 @@
# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.

from __future__ import with_statement

import threading

import sickbeard


class ProperSearcher():
def __init__(self):
self.lock = threading.Lock()
self.amActive = False

def run(self):

self.amActive = True

propersearch_queue_item = sickbeard.search_queue.ProperSearchQueueItem()
sickbeard.searchQueueScheduler.action.add_item(propersearch_queue_item)

self.amActive = False
@@ -25,7 +25,7 @@ import datetime

import sickbeard
from sickbeard import db, logger, common, exceptions, helpers, network_timezones, generic_queue, search, \
failed_history, history, ui
failed_history, history, ui, properFinder
from sickbeard.search import wantedEpisodes


@@ -35,6 +35,7 @@ BACKLOG_SEARCH = 10
RECENT_SEARCH = 20
FAILED_SEARCH = 30
MANUAL_SEARCH = 40
PROPER_SEARCH = 50

MANUAL_SEARCH_HISTORY = []
MANUAL_SEARCH_HISTORY_SIZE = 100

@@ -42,87 +43,122 @@ MANUAL_SEARCH_HISTORY_SIZE = 100
class SearchQueue(generic_queue.GenericQueue):
def __init__(self):
generic_queue.GenericQueue.__init__(self)
self.queue_name = "SEARCHQUEUE"
self.queue_name = 'SEARCHQUEUE'

def is_in_queue(self, show, segment):
for cur_item in self.queue:
if isinstance(cur_item, BacklogQueueItem) and cur_item.show == show and cur_item.segment == segment:
return True
return False
with self.lock:
for cur_item in self.queue:
if isinstance(cur_item, BacklogQueueItem) and cur_item.show == show and cur_item.segment == segment:
return True
return False

def is_ep_in_queue(self, segment):
for cur_item in self.queue:
if isinstance(cur_item, (ManualSearchQueueItem, FailedQueueItem)) and cur_item.segment == segment:
return True
return False
with self.lock:
for cur_item in self.queue:
if isinstance(cur_item, (ManualSearchQueueItem, FailedQueueItem)) and cur_item.segment == segment:
return True
return False

def is_show_in_queue(self, show):
for cur_item in self.queue:
if isinstance(cur_item, (ManualSearchQueueItem, FailedQueueItem)) and cur_item.show.indexerid == show:
return True
return False
with self.lock:
for cur_item in self.queue:
if isinstance(cur_item, (ManualSearchQueueItem, FailedQueueItem)) and cur_item.show.indexerid == show:
return True
return False

def get_all_ep_from_queue(self, show):
ep_obj_list = []
for cur_item in self.queue:
if isinstance(cur_item, (ManualSearchQueueItem, FailedQueueItem)) and str(cur_item.show.indexerid) == show:
ep_obj_list.append(cur_item)
with self.lock:
ep_obj_list = []
for cur_item in self.queue:
if isinstance(cur_item, (ManualSearchQueueItem, FailedQueueItem)) and str(cur_item.show.indexerid) == show:
ep_obj_list.append(cur_item)

if ep_obj_list:
return ep_obj_list
return False
if ep_obj_list:
return ep_obj_list
return False

def pause_backlog(self):
self.min_priority = generic_queue.QueuePriorities.HIGH
with self.lock:
self.min_priority = generic_queue.QueuePriorities.HIGH

def unpause_backlog(self):
self.min_priority = 0
with self.lock:
self.min_priority = 0

def is_backlog_paused(self):
# backlog priorities are NORMAL, this should be done properly somewhere
return self.min_priority >= generic_queue.QueuePriorities.NORMAL
with self.lock:
return self.min_priority >= generic_queue.QueuePriorities.NORMAL

def _is_in_progress(self, itemType):
with self.lock:
for cur_item in self.queue + [self.currentItem]:
if isinstance(cur_item, itemType):
return True
return False

def is_manualsearch_in_progress(self):
# Only referenced in webserve.py, only current running manualsearch or failedsearch is needed!!
if isinstance(self.currentItem, (ManualSearchQueueItem, FailedQueueItem)):
return True
return False
return self._is_in_progress((ManualSearchQueueItem, FailedQueueItem))

def is_backlog_in_progress(self):
for cur_item in self.queue + [self.currentItem]:
if isinstance(cur_item, BacklogQueueItem):
return True
return False

def is_standard_backlog_in_progress(self):
for cur_item in self.queue + [self.currentItem]:
if isinstance(cur_item, BacklogQueueItem) and cur_item.standard_backlog:
return True
return False
return self._is_in_progress(BacklogQueueItem)

def is_recentsearch_in_progress(self):
for cur_item in self.queue + [self.currentItem]:
if isinstance(cur_item, RecentSearchQueueItem):
return True
return False
return self._is_in_progress(RecentSearchQueueItem)

def is_propersearch_in_progress(self):
return self._is_in_progress(ProperSearchQueueItem)

def is_standard_backlog_in_progress(self):
with self.lock:
for cur_item in self.queue + [self.currentItem]:
if isinstance(cur_item, BacklogQueueItem) and cur_item.standard_backlog:
return True
return False

def type_of_backlog_in_progress(self):
limited = full = other = False
with self.lock:
for cur_item in self.queue + [self.currentItem]:
if isinstance(cur_item, BacklogQueueItem):
if cur_item.standard_backlog:
if cur_item.limited_backlog:
limited = True
else:
full = True
else:
other = True

types = []
for msg, variant in ['Limited', limited], ['Full', full], ['On Demand', other]:
if variant:
types.append(msg)
message = 'None'
if types:
message = ', '.join(types)
return message

def queue_length(self):
length = {'backlog': 0, 'recent': 0, 'manual': 0, 'failed': 0}
for cur_item in self.queue:
if isinstance(cur_item, RecentSearchQueueItem):
length['recent'] += 1
elif isinstance(cur_item, BacklogQueueItem):
length['backlog'] += 1
elif isinstance(cur_item, ManualSearchQueueItem):
length['manual'] += 1
elif isinstance(cur_item, FailedQueueItem):
length['failed'] += 1
return length
length = {'backlog': [], 'recent': 0, 'manual': [], 'failed': [], 'proper': 0}
with self.lock:
for cur_item in [self.currentItem] + self.queue:
if isinstance(cur_item, RecentSearchQueueItem):
length['recent'] += 1
elif isinstance(cur_item, BacklogQueueItem):
length['backlog'].append([cur_item.show.indexerid, cur_item.show.name, cur_item.segment, cur_item.standard_backlog, cur_item.limited_backlog, cur_item.forced])
elif isinstance(cur_item, ProperSearchQueueItem):
length['proper'] += 1
elif isinstance(cur_item, ManualSearchQueueItem):
length['manual'].append([cur_item.show.indexerid, cur_item.show.name, cur_item.segment])
elif isinstance(cur_item, FailedQueueItem):
length['failed'].append([cur_item.show.indexerid, cur_item.show.name, cur_item.segment])
return length


def add_item(self, item):
if isinstance(item, RecentSearchQueueItem):
# recent searches
if isinstance(item, (RecentSearchQueueItem, ProperSearchQueueItem)):
# recent and proper searches
generic_queue.GenericQueue.add_item(self, item)
elif isinstance(item, BacklogQueueItem) and not self.is_in_queue(item.show, item.segment):
# backlog searches
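Most of the SearchQueue changes follow one pattern: take the queue lock before walking self.queue plus the currently running item. A standalone sketch of that helper with illustrative classes (not the project code):

import threading


class MiniQueue(object):
    """Sketch of the lock-guarded inspection pattern used above."""

    def __init__(self):
        self.lock = threading.Lock()
        self.queue = []
        self.currentItem = None

    def _is_in_progress(self, item_type):
        # inspect queued items plus the one currently running, under the lock
        with self.lock:
            for cur_item in self.queue + [self.currentItem]:
                if isinstance(cur_item, item_type):
                    return True
            return False


class BacklogItem(object):
    pass


q = MiniQueue()
q.currentItem = BacklogItem()
print(q._is_in_progress(BacklogItem))  # True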
@@ -131,7 +167,7 @@ class SearchQueue(generic_queue.GenericQueue):
# manual and failed searches
generic_queue.GenericQueue.add_item(self, item)
else:
logger.log(u"Not adding item, it's already in the queue", logger.DEBUG)
logger.log(u'Not adding item, it\'s already in the queue', logger.DEBUG)


class RecentSearchQueueItem(generic_queue.QueueItem):

@@ -143,49 +179,51 @@ class RecentSearchQueueItem(generic_queue.QueueItem):
def run(self):
generic_queue.QueueItem.run(self)

self._change_missing_episodes()
try:
self._change_missing_episodes()

self.update_providers()
self.update_providers()

show_list = sickbeard.showList
fromDate = datetime.date.fromordinal(1)
for curShow in show_list:
if curShow.paused:
continue
show_list = sickbeard.showList
fromDate = datetime.date.fromordinal(1)
for curShow in show_list:
if curShow.paused:
continue

self.episodes.extend(wantedEpisodes(curShow, fromDate))
self.episodes.extend(wantedEpisodes(curShow, fromDate))

if not self.episodes:
logger.log(u'No search of cache for episodes required')
self.success = True
else:
num_shows = len(set([ep.show.name for ep in self.episodes]))
logger.log(u'Found %d needed episode%s spanning %d show%s'
% (len(self.episodes), helpers.maybe_plural(len(self.episodes)),
num_shows, helpers.maybe_plural(num_shows)))
if not self.episodes:
logger.log(u'No search of cache for episodes required')
self.success = True
else:
num_shows = len(set([ep.show.name for ep in self.episodes]))
logger.log(u'Found %d needed episode%s spanning %d show%s'
% (len(self.episodes), helpers.maybe_plural(len(self.episodes)),
num_shows, helpers.maybe_plural(num_shows)))

try:
logger.log(u'Beginning recent search for episodes')
found_results = search.searchForNeededEpisodes(self.episodes)
try:
logger.log(u'Beginning recent search for episodes')
found_results = search.searchForNeededEpisodes(self.episodes)

if not len(found_results):
logger.log(u'No needed episodes found')
else:
for result in found_results:
# just use the first result for now
logger.log(u'Downloading %s from %s' % (result.name, result.provider.name))
self.success = search.snatchEpisode(result)
if not len(found_results):
logger.log(u'No needed episodes found')
else:
for result in found_results:
# just use the first result for now
logger.log(u'Downloading %s from %s' % (result.name, result.provider.name))
self.success = search.snatchEpisode(result)

# give the CPU a break
time.sleep(common.cpu_presets[sickbeard.CPU_PRESET])
# give the CPU a break
time.sleep(common.cpu_presets[sickbeard.CPU_PRESET])

except Exception:
logger.log(traceback.format_exc(), logger.DEBUG)
except Exception:
logger.log(traceback.format_exc(), logger.DEBUG)

if self.success is None:
self.success = False
if self.success is None:
self.success = False

self.finish()
finally:
self.finish()

@staticmethod
def _change_missing_episodes():
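The re-indented run() above is wrapped in try/finally so the queue item always reaches finish(), even when a search raises. A tiny sketch of that contract with a hypothetical QueueItem base (names are illustrative only):

class QueueItem(object):
    """Hypothetical base class used only for this sketch."""

    def __init__(self):
        self.success = None
        self.finished = False

    def finish(self):
        self.finished = True


class SafeSearchItem(QueueItem):
    def run(self):
        try:
            raise RuntimeError('provider blew up')   # simulate a failed search
        except Exception:
            pass                                     # log and swallow, like the real item
        finally:
            if self.success is None:
                self.success = False
            self.finish()                            # always reached, so the queue can move on


item = SafeSearchItem()
item.run()
print(item.finished, item.success)  # True False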
@@ -268,6 +306,21 @@ class RecentSearchQueueItem(generic_queue.QueueItem):
logger.log('Finished updating provider caches')


class ProperSearchQueueItem(generic_queue.QueueItem):
def __init__(self):
generic_queue.QueueItem.__init__(self, 'Proper Search', PROPER_SEARCH)
self.priority = generic_queue.QueuePriorities.HIGH
self.success = None

def run(self):
generic_queue.QueueItem.run(self)

try:
properFinder.searchPropers()
finally:
self.finish()


class ManualSearchQueueItem(generic_queue.QueueItem):
def __init__(self, show, segment):
generic_queue.QueueItem.__init__(self, 'Manual Search', MANUAL_SEARCH)

@@ -282,14 +335,14 @@ class ManualSearchQueueItem(generic_queue.QueueItem):
generic_queue.QueueItem.run(self)

try:
logger.log("Beginning manual search for: [" + self.segment.prettyName() + "]")
logger.log('Beginning manual search for: [' + self.segment.prettyName() + ']')
self.started = True

searchResult = search.searchProviders(self.show, [self.segment], True)

if searchResult:
# just use the first result for now
logger.log(u"Downloading " + searchResult[0].name + " from " + searchResult[0].provider.name)
logger.log(u'Downloading ' + searchResult[0].name + ' from ' + searchResult[0].provider.name)
self.success = search.snatchEpisode(searchResult[0])

# give the CPU a break

@@ -297,24 +350,25 @@ class ManualSearchQueueItem(generic_queue.QueueItem):

else:
ui.notifications.message('No downloads were found',
"Couldn't find a download for <i>%s</i>" % self.segment.prettyName())
'Couldn\'t find a download for <i>%s</i>' % self.segment.prettyName())

logger.log(u"Unable to find a download for: [" + self.segment.prettyName() + "]")
logger.log(u'Unable to find a download for: [' + self.segment.prettyName() + ']')

except Exception:
logger.log(traceback.format_exc(), logger.DEBUG)

### Keep a list with the 100 last executed searches
fifo(MANUAL_SEARCH_HISTORY, self, MANUAL_SEARCH_HISTORY_SIZE)
finally:
### Keep a list with the 100 last executed searches
fifo(MANUAL_SEARCH_HISTORY, self, MANUAL_SEARCH_HISTORY_SIZE)

if self.success is None:
self.success = False
if self.success is None:
self.success = False

self.finish()
self.finish()


class BacklogQueueItem(generic_queue.QueueItem):
def __init__(self, show, segment, standard_backlog=False):
def __init__(self, show, segment, standard_backlog=False, limited_backlog=False, forced=False):
generic_queue.QueueItem.__init__(self, 'Backlog', BACKLOG_SEARCH)
self.priority = generic_queue.QueuePriorities.LOW
self.name = 'BACKLOG-' + str(show.indexerid)

@@ -322,28 +376,31 @@ class BacklogQueueItem(generic_queue.QueueItem):
self.show = show
self.segment = segment
self.standard_backlog = standard_backlog
self.limited_backlog = limited_backlog
self.forced = forced

def run(self):
generic_queue.QueueItem.run(self)

try:
logger.log("Beginning backlog search for: [" + self.show.name + "]")
logger.log('Beginning backlog search for: [' + self.show.name + ']')
searchResult = search.searchProviders(self.show, self.segment, False)

if searchResult:
for result in searchResult:
# just use the first result for now
logger.log(u"Downloading " + result.name + " from " + result.provider.name)
logger.log(u'Downloading ' + result.name + ' from ' + result.provider.name)
search.snatchEpisode(result)

# give the CPU a break
time.sleep(common.cpu_presets[sickbeard.CPU_PRESET])
else:
logger.log(u"No needed episodes found during backlog search for: [" + self.show.name + "]")
logger.log(u'No needed episodes found during backlog search for: [' + self.show.name + ']')
except Exception:
logger.log(traceback.format_exc(), logger.DEBUG)

self.finish()
finally:
self.finish()


class FailedQueueItem(generic_queue.QueueItem):

@@ -363,7 +420,7 @@ class FailedQueueItem(generic_queue.QueueItem):
try:
for epObj in self.segment:

logger.log(u"Marking episode as bad: [" + epObj.prettyName() + "]")
logger.log(u'Marking episode as bad: [' + epObj.prettyName() + ']')

failed_history.markFailed(epObj)

@@ -373,14 +430,14 @@ class FailedQueueItem(generic_queue.QueueItem):
history.logFailed(epObj, release, provider)

failed_history.revertEpisode(epObj)
logger.log("Beginning failed download search for: [" + epObj.prettyName() + "]")
logger.log('Beginning failed download search for: [' + epObj.prettyName() + ']')

searchResult = search.searchProviders(self.show, self.segment, True)

if searchResult:
for result in searchResult:
# just use the first result for now
logger.log(u"Downloading " + result.name + " from " + result.provider.name)
logger.log(u'Downloading ' + result.name + ' from ' + result.provider.name)
search.snatchEpisode(result)

# give the CPU a break

@@ -391,13 +448,14 @@ class FailedQueueItem(generic_queue.QueueItem):
except Exception:
logger.log(traceback.format_exc(), logger.DEBUG)

### Keep a list with the 100 last executed searches
fifo(MANUAL_SEARCH_HISTORY, self, MANUAL_SEARCH_HISTORY_SIZE)
finally:
### Keep a list with the 100 last executed searches
fifo(MANUAL_SEARCH_HISTORY, self, MANUAL_SEARCH_HISTORY_SIZE)

if self.success is None:
self.success = False
if self.success is None:
self.success = False

self.finish()
self.finish()

def fifo(myList, item, maxSize = 100):
if len(myList) >= maxSize:
@@ -31,65 +31,75 @@ from sickbeard import network_timezones
from sickbeard import failed_history

class ShowUpdater():
def __init__(self):
self.amActive = False

def run(self, force=False):

update_datetime = datetime.datetime.now()
update_date = update_datetime.date()
self.amActive = True

# refresh network timezones
network_timezones.update_network_dict()
try:
update_datetime = datetime.datetime.now()
update_date = update_datetime.date()

# sure, why not?
if sickbeard.USE_FAILED_DOWNLOADS:
failed_history.trimHistory()
# refresh network timezones
network_timezones.update_network_dict()

# clear the data of unused providers
sickbeard.helpers.clear_unused_providers()
# sure, why not?
if sickbeard.USE_FAILED_DOWNLOADS:
failed_history.trimHistory()

logger.log(u"Doing full update on all shows")
# clear the data of unused providers
sickbeard.helpers.clear_unused_providers()

# clean out cache directory, remove everything > 12 hours old
sickbeard.helpers.clearCache()
logger.log(u'Doing full update on all shows')

# select 10 'Ended' tv_shows updated more than 90 days ago to include in this update
stale_should_update = []
stale_update_date = (update_date - datetime.timedelta(days=90)).toordinal()
# clean out cache directory, remove everything > 12 hours old
sickbeard.helpers.clearCache()

# last_update_date <= 90 days, sorted ASC because dates are ordinal
myDB = db.DBConnection()
sql_result = myDB.select(
"SELECT indexer_id FROM tv_shows WHERE status = 'Ended' AND last_update_indexer <= ? ORDER BY last_update_indexer ASC LIMIT 10;",
[stale_update_date])
# select 10 'Ended' tv_shows updated more than 90 days ago and all shows not updated more then 180 days ago to include in this update
stale_should_update = []
stale_update_date = (update_date - datetime.timedelta(days=90)).toordinal()
stale_update_date_max = (update_date - datetime.timedelta(days=180)).toordinal()

for cur_result in sql_result:
stale_should_update.append(int(cur_result['indexer_id']))
# last_update_date <= 90 days, sorted ASC because dates are ordinal
myDB = db.DBConnection()
sql_results = myDB.mass_action([[
'SELECT indexer_id FROM tv_shows WHERE last_update_indexer <= ? AND last_update_indexer >= ? ORDER BY last_update_indexer ASC LIMIT 10;',
[stale_update_date, stale_update_date_max]], ['SELECT indexer_id FROM tv_shows WHERE last_update_indexer < ?;', [stale_update_date_max]]])

# start update process
piList = []
for curShow in sickbeard.showList:
for sql_result in sql_results:
for cur_result in sql_result:
stale_should_update.append(int(cur_result['indexer_id']))

try:
# get next episode airdate
curShow.nextEpisode()
# start update process
piList = []
for curShow in sickbeard.showList:

# if should_update returns True (not 'Ended') or show is selected stale 'Ended' then update, otherwise just refresh
if curShow.should_update(update_date=update_date) or curShow.indexerid in stale_should_update:
curQueueItem = sickbeard.showQueueScheduler.action.updateShow(curShow, True) # @UndefinedVariable
else:
logger.log(
u"Not updating episodes for show " + curShow.name + " because it's marked as ended and last/next episode is not within the grace period.",
logger.DEBUG)
curQueueItem = sickbeard.showQueueScheduler.action.refreshShow(curShow, True) # @UndefinedVariable
try:
# get next episode airdate
curShow.nextEpisode()

piList.append(curQueueItem)
# if should_update returns True (not 'Ended') or show is selected stale 'Ended' then update, otherwise just refresh
if curShow.should_update(update_date=update_date) or curShow.indexerid in stale_should_update:
curQueueItem = sickbeard.showQueueScheduler.action.updateShow(curShow, scheduled_update=True) # @UndefinedVariable
else:
logger.log(
u'Not updating episodes for show ' + curShow.name + ' because it\'s marked as ended and last/next episode is not within the grace period.',
logger.DEBUG)
curQueueItem = sickbeard.showQueueScheduler.action.refreshShow(curShow, True, True) # @UndefinedVariable

except (exceptions.CantUpdateException, exceptions.CantRefreshException), e:
logger.log(u"Automatic update failed: " + ex(e), logger.ERROR)
piList.append(curQueueItem)

ui.ProgressIndicators.setIndicator('dailyUpdate', ui.QueueProgressIndicator("Daily Update", piList))
except (exceptions.CantUpdateException, exceptions.CantRefreshException), e:
logger.log(u'Automatic update failed: ' + ex(e), logger.ERROR)

logger.log(u"Completed full update on all shows")
ui.ProgressIndicators.setIndicator('dailyUpdate', ui.QueueProgressIndicator('Daily Update', piList))

logger.log(u'Added all shows to show queue for full update')

finally:
self.amActive = False

def __del__(self):
pass
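The updater now collects two batches of stale shows: up to ten that have gone 90-180 days without an indexer update, plus everything older than 180 days. A standalone sketch of the equivalent selection against a throwaway sqlite table (schema reduced to the two columns used here, so only an illustration of the query logic):

import datetime
import sqlite3

conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE tv_shows (indexer_id INTEGER, last_update_indexer INTEGER)')
today = datetime.date.today()
rows = [(1, (today - datetime.timedelta(days=30)).toordinal()),
        (2, (today - datetime.timedelta(days=120)).toordinal()),
        (3, (today - datetime.timedelta(days=400)).toordinal())]
conn.executemany('INSERT INTO tv_shows VALUES (?,?)', rows)

stale = (today - datetime.timedelta(days=90)).toordinal()
stale_max = (today - datetime.timedelta(days=180)).toordinal()

stale_should_update = []
# up to 10 shows that have gone 90-180 days without an indexer update
for r in conn.execute('SELECT indexer_id FROM tv_shows WHERE last_update_indexer <= ? AND last_update_indexer >= ? '
                      'ORDER BY last_update_indexer ASC LIMIT 10', (stale, stale_max)):
    stale_should_update.append(r[0])
# plus every show not updated in more than 180 days
for r in conn.execute('SELECT indexer_id FROM tv_shows WHERE last_update_indexer < ?', (stale_max,)):
    stale_should_update.append(r[0])

print(stale_should_update)  # [2, 3]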
@@ -145,7 +145,7 @@ def makeSceneSeasonSearchString(show, ep_obj, extraSearchType=None):
# for providers that don't allow multiple searches in one request we only search for Sxx style stuff
else:
for cur_season in seasonStrings:
if len(show.release_groups.whitelist) > 0:
if show.is_anime and show.release_groups is not None and show.release_groups.whitelist:
for keyword in show.release_groups.whitelist:
toReturn.append(keyword + '.' + curShow+ "." + cur_season)
else:

@@ -182,7 +182,7 @@ def makeSceneSearchString(show, ep_obj):

for curShow in showNames:
for curEpString in epStrings:
if len(ep_obj.show.release_groups.whitelist) > 0:
if ep_obj.show.is_anime and ep_obj.show.release_groups is not None and ep_obj.show.release_groups.whitelist:
for keyword in ep_obj.show.release_groups.whitelist:
toReturn.append(keyword + '.' + curShow + '.' + curEpString)
else:
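Both hunks swap a bare whitelist length check for a guard that only applies release-group keywords to anime shows whose release_groups object exists and has entries. A minimal sketch of that guard with stand-in classes (illustrative only):

class Groups(object):
    def __init__(self, whitelist):
        self.whitelist = whitelist


class Show(object):
    def __init__(self, is_anime=False, release_groups=None):
        self.is_anime = is_anime
        self.release_groups = release_groups


def season_search_strings(show, show_name, season_string):
    out = []
    # guard: only anime shows with a populated whitelist get group-prefixed strings
    if show.is_anime and show.release_groups is not None and show.release_groups.whitelist:
        for keyword in show.release_groups.whitelist:
            out.append(keyword + '.' + show_name + '.' + season_string)
    else:
        out.append(show_name + '.' + season_string)
    return out


print(season_search_strings(Show(), 'Some.Show', 'S01'))                         # no whitelist, plain string
print(season_search_strings(Show(True, Groups(['GrpA'])), 'Some.Anime', 'S01'))  # whitelist applied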
@@ -1,4 +1,4 @@
# Author: Nic Wolfe <nic@wolfeden.ca>
# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickGear.

@@ -34,14 +34,16 @@ from sickbeard.blackandwhitelist import BlackAndWhiteList
class ShowQueue(generic_queue.GenericQueue):
def __init__(self):
generic_queue.GenericQueue.__init__(self)
self.queue_name = "SHOWQUEUE"
self.queue_name = 'SHOWQUEUE'

def _isInQueue(self, show, actions):
return show in [x.show for x in self.queue if x.action_id in actions]
with self.lock:
return show in [x.show for x in self.queue if x.action_id in actions]

def _isBeingSomethinged(self, show, actions):
return self.currentItem != None and show == self.currentItem.show and \
self.currentItem.action_id in actions
with self.lock:
return self.currentItem != None and show == self.currentItem.show and \
self.currentItem.action_id in actions

def isInUpdateQueue(self, show):
return self._isInQueue(show, (ShowQueueActions.UPDATE, ShowQueueActions.FORCEUPDATE))

@@ -70,48 +72,77 @@ class ShowQueue(generic_queue.GenericQueue):
def isBeingSubtitled(self, show):
return self._isBeingSomethinged(show, (ShowQueueActions.SUBTITLE,))

def isShowUpdateRunning(self):
with self.lock:
for x in self.queue + [self.currentItem]:
if isinstance(x, ShowQueueItem) and x.scheduled_update:
return True
return False

def _getLoadingShowList(self):
return [x for x in self.queue + [self.currentItem] if x != None and x.isLoading]
with self.lock:
return [x for x in self.queue + [self.currentItem] if x != None and x.isLoading]

def queue_length(self):
length = {'add': [], 'update': [], 'forceupdate': [], 'forceupdateweb': [], 'refresh': [], 'rename': [], 'subtitle': []}
with self.lock:
for cur_item in [self.currentItem] + self.queue:
if isinstance(cur_item, QueueItemAdd):
length['add'].append([cur_item.show_name, cur_item.scheduled_update])
elif isinstance(cur_item, QueueItemUpdate):
update_type = 'Normal'
if isinstance(cur_item, QueueItemForceUpdate):
update_type = 'Forced'
elif isinstance(cur_item, QueueItemForceUpdateWeb):
update_type = 'Forced Web'
length['update'].append([cur_item.show_name, cur_item.scheduled_update, update_type])
elif isinstance(cur_item, QueueItemRefresh):
length['refresh'].append([cur_item.show_name, cur_item.scheduled_update])
elif isinstance(cur_item, QueueItemRename):
length['rename'].append([cur_item.show_name, cur_item.scheduled_update])
elif isinstance(cur_item, QueueItemSubtitle):
length['subtitle'].append([cur_item.show_name, cur_item.scheduled_update])
return length

loadingShowList = property(_getLoadingShowList)

def updateShow(self, show, force=False, web=False):
def updateShow(self, show, force=False, web=False, scheduled_update=False):

if self.isBeingAdded(show):
raise exceptions.CantUpdateException(
"Show is still being added, wait until it is finished before you update.")
'Show is still being added, wait until it is finished before you update.')

if self.isBeingUpdated(show):
raise exceptions.CantUpdateException(
"This show is already being updated, can't update again until it's done.")
'This show is already being updated, can\'t update again until it\'s done.')

if self.isInUpdateQueue(show):
raise exceptions.CantUpdateException(
"This show is already being updated, can't update again until it's done.")
'This show is already being updated, can\'t update again until it\'s done.')

if not force:
queueItemObj = QueueItemUpdate(show)
queueItemObj = QueueItemUpdate(show, scheduled_update=scheduled_update)
elif web:
queueItemObj = QueueItemForceUpdateWeb(show)
queueItemObj = QueueItemForceUpdateWeb(show, scheduled_update=scheduled_update)
else:
queueItemObj = QueueItemForceUpdate(show)
queueItemObj = QueueItemForceUpdate(show, scheduled_update=scheduled_update)

self.add_item(queueItemObj)

return queueItemObj

def refreshShow(self, show, force=False):
def refreshShow(self, show, force=False, scheduled_update=False):

if self.isBeingRefreshed(show) and not force:
raise exceptions.CantRefreshException("This show is already being refreshed, not refreshing again.")
raise exceptions.CantRefreshException('This show is already being refreshed, not refreshing again.')

if (self.isBeingUpdated(show) or self.isInUpdateQueue(show)) and not force:
logger.log(
u"A refresh was attempted but there is already an update queued or in progress. Since updates do a refresh at the end anyway I'm skipping this request.",
u'A refresh was attempted but there is already an update queued or in progress. Since updates do a refresh at the end anyway I\'m skipping this request.',
logger.DEBUG)
return

queueItemObj = QueueItemRefresh(show, force=force)
queueItemObj = QueueItemRefresh(show, force=force, scheduled_update=scheduled_update)

self.add_item(queueItemObj)
@@ -134,7 +165,7 @@ class ShowQueue(generic_queue.GenericQueue):
return queueItemObj

def addShow(self, indexer, indexer_id, showDir, default_status=None, quality=None, flatten_folders=None,
lang="en", subtitles=None, anime=None, scene=None, paused=None, blacklist=None, whitelist=None,
lang='en', subtitles=None, anime=None, scene=None, paused=None, blacklist=None, whitelist=None,
wanted_begin=None, wanted_latest=None, tag=None):
queueItemObj = QueueItemAdd(indexer, indexer_id, showDir, default_status, quality, flatten_folders, lang,
subtitles, anime, scene, paused, blacklist, whitelist,

@@ -173,9 +204,10 @@ class ShowQueueItem(generic_queue.QueueItem):
- show being subtitled
"""

def __init__(self, action_id, show):
def __init__(self, action_id, show, scheduled_update=False):
generic_queue.QueueItem.__init__(self, ShowQueueActions.names[action_id], action_id)
self.show = show
self.scheduled_update = scheduled_update

def isInQueue(self):
return self in sickbeard.showQueueScheduler.action.queue + [

@@ -194,7 +226,7 @@ class ShowQueueItem(generic_queue.QueueItem):

class QueueItemAdd(ShowQueueItem):
def __init__(self, indexer, indexer_id, showDir, default_status, quality, flatten_folders, lang, subtitles, anime,
scene, paused, blacklist, whitelist, default_wanted_begin, default_wanted_latest, tag):
scene, paused, blacklist, whitelist, default_wanted_begin, default_wanted_latest, tag, scheduled_update=False):

self.indexer = indexer
self.indexer_id = indexer_id

@@ -216,7 +248,7 @@ class QueueItemAdd(ShowQueueItem):
self.show = None

# this will initialize self.show to None
ShowQueueItem.__init__(self, ShowQueueActions.ADD, self.show)
ShowQueueItem.__init__(self, ShowQueueActions.ADD, self.show, scheduled_update)

def _getName(self):
"""

@@ -244,7 +276,7 @@ class QueueItemAdd(ShowQueueItem):

ShowQueueItem.run(self)

logger.log(u"Starting to add show " + self.showDir)
logger.log(u'Starting to add show ' + self.showDir)
# make sure the Indexer IDs are valid
try:
@@ -252,37 +284,37 @@ class QueueItemAdd(ShowQueueItem):
if self.lang:
lINDEXER_API_PARMS['language'] = self.lang

logger.log(u"" + str(sickbeard.indexerApi(self.indexer).name) + ": " + repr(lINDEXER_API_PARMS))
logger.log(u'' + str(sickbeard.indexerApi(self.indexer).name) + ': ' + repr(lINDEXER_API_PARMS))

t = sickbeard.indexerApi(self.indexer).indexer(**lINDEXER_API_PARMS)
s = t[self.indexer_id]

# this usually only happens if they have an NFO in their show dir which gave us a Indexer ID that has no proper english version of the show
if getattr(s, 'seriesname', None) is None:
logger.log(u"Show in " + self.showDir + " has no name on " + str(
sickbeard.indexerApi(self.indexer).name) + ", probably the wrong language used to search with.",
logger.log(u'Show in ' + self.showDir + ' has no name on ' + str(
sickbeard.indexerApi(self.indexer).name) + ', probably the wrong language used to search with.',
logger.ERROR)
ui.notifications.error("Unable to add show",
"Show in " + self.showDir + " has no name on " + str(sickbeard.indexerApi(
self.indexer).name) + ", probably the wrong language. Delete .nfo and add manually in the correct language.")
ui.notifications.error('Unable to add show',
'Show in ' + self.showDir + ' has no name on ' + str(sickbeard.indexerApi(
self.indexer).name) + ', probably the wrong language. Delete .nfo and add manually in the correct language.')
self._finishEarly()
return
# if the show has no episodes/seasons
if not s:
logger.log(u"Show " + str(s['seriesname']) + " is on " + str(
sickbeard.indexerApi(self.indexer).name) + " but contains no season/episode data.", logger.ERROR)
ui.notifications.error("Unable to add show",
"Show " + str(s['seriesname']) + " is on " + str(sickbeard.indexerApi(
self.indexer).name) + " but contains no season/episode data.")
logger.log(u'Show ' + str(s['seriesname']) + ' is on ' + str(
sickbeard.indexerApi(self.indexer).name) + ' but contains no season/episode data.', logger.ERROR)
ui.notifications.error('Unable to add show',
'Show ' + str(s['seriesname']) + ' is on ' + str(sickbeard.indexerApi(
self.indexer).name) + ' but contains no season/episode data.')
self._finishEarly()
return
except Exception, e:
logger.log(u"Unable to find show ID:" + str(self.indexer_id) + " on Indexer: " + str(
logger.log(u'Unable to find show ID:' + str(self.indexer_id) + ' on Indexer: ' + str(
sickbeard.indexerApi(self.indexer).name), logger.ERROR)
ui.notifications.error("Unable to add show",
"Unable to look up the show in " + self.showDir + " on " + str(sickbeard.indexerApi(
self.indexer).name) + " using ID " + str(
self.indexer_id) + ", not using the NFO. Delete .nfo and try adding manually again.")
ui.notifications.error('Unable to add show',
'Unable to look up the show in ' + self.showDir + ' on ' + str(sickbeard.indexerApi(
self.indexer).name) + ' using ID ' + str(
self.indexer_id) + ', not using the NFO. Delete .nfo and try adding manually again.')
self._finishEarly()
return

@@ -310,35 +342,35 @@ class QueueItemAdd(ShowQueueItem):
self.show.release_groups.set_white_keywords(self.whitelist)

# be smartish about this
if self.show.genre and "talk show" in self.show.genre.lower():
if self.show.genre and 'talk show' in self.show.genre.lower():
self.show.air_by_date = 1
if self.show.genre and "documentary" in self.show.genre.lower():
if self.show.genre and 'documentary' in self.show.genre.lower():
self.show.air_by_date = 0
if self.show.classification and "sports" in self.show.classification.lower():
if self.show.classification and 'sports' in self.show.classification.lower():
self.show.sports = 1

except sickbeard.indexer_exception, e:
logger.log(
u"Unable to add show due to an error with " + sickbeard.indexerApi(self.indexer).name + ": " + ex(e),
u'Unable to add show due to an error with ' + sickbeard.indexerApi(self.indexer).name + ': ' + ex(e),
logger.ERROR)
if self.show:
ui.notifications.error(
"Unable to add " + str(self.show.name) + " due to an error with " + sickbeard.indexerApi(
self.indexer).name + "")
'Unable to add ' + str(self.show.name) + ' due to an error with ' + sickbeard.indexerApi(
self.indexer).name + '')
else:
ui.notifications.error(
"Unable to add show due to an error with " + sickbeard.indexerApi(self.indexer).name + "")
'Unable to add show due to an error with ' + sickbeard.indexerApi(self.indexer).name + '')
self._finishEarly()
return

except exceptions.MultipleShowObjectsException:
logger.log(u"The show in " + self.showDir + " is already in your show list, skipping", logger.ERROR)
ui.notifications.error('Show skipped', "The show in " + self.showDir + " is already in your show list")
logger.log(u'The show in ' + self.showDir + ' is already in your show list, skipping', logger.ERROR)
ui.notifications.error('Show skipped', 'The show in ' + self.showDir + ' is already in your show list')
self._finishEarly()
return

except Exception, e:
logger.log(u"Error trying to add show: " + ex(e), logger.ERROR)
logger.log(u'Error trying to add show: ' + ex(e), logger.ERROR)
logger.log(traceback.format_exc(), logger.DEBUG)
self._finishEarly()
raise

@@ -348,7 +380,7 @@ class QueueItemAdd(ShowQueueItem):
try:
self.show.saveToDB()
except Exception, e:
logger.log(u"Error saving the show to the database: " + ex(e), logger.ERROR)
logger.log(u'Error saving the show to the database: ' + ex(e), logger.ERROR)
logger.log(traceback.format_exc(), logger.DEBUG)
self._finishEarly()
raise

@@ -360,7 +392,7 @@ class QueueItemAdd(ShowQueueItem):
self.show.loadEpisodesFromIndexer()
except Exception, e:
logger.log(
u"Error with " + sickbeard.indexerApi(self.show.indexer).name + ", not creating episode list: " + ex(e),
u'Error with ' + sickbeard.indexerApi(self.show.indexer).name + ', not creating episode list: ' + ex(e),
logger.ERROR)
logger.log(traceback.format_exc(), logger.DEBUG)

@@ -370,14 +402,14 @@ class QueueItemAdd(ShowQueueItem):
try:
self.show.loadEpisodesFromDir()
except Exception, e:
logger.log(u"Error searching directory for episodes: " + ex(e), logger.ERROR)
logger.log(u'Error searching directory for episodes: ' + ex(e), logger.ERROR)
logger.log(traceback.format_exc(), logger.DEBUG)

# if they gave a custom status then change all the eps to it
my_db = db.DBConnection()
if self.default_status != SKIPPED:
logger.log(u"Setting all episodes to the specified default status: " + str(self.default_status))
my_db.action("UPDATE tv_episodes SET status = ? WHERE status = ? AND showid = ? AND season != 0",
logger.log(u'Setting all episodes to the specified default status: ' + str(self.default_status))
my_db.action('UPDATE tv_episodes SET status = ? WHERE status = ? AND showid = ? AND season != 0',
[self.default_status, SKIPPED, self.show.indexerid])

# if they gave a number to start or number to end as wanted, then change those eps to it

@@ -399,7 +431,7 @@ class QueueItemAdd(ShowQueueItem):

wanted_updates = db_obj.select(select)
db_obj.action(update)
result = db_obj.select("SELECT changes() as last FROM [tv_episodes]")
result = db_obj.select('SELECT changes() as last FROM [tv_episodes]')
for cur_result in result:
actual = cur_result['last']
break
@@ -457,8 +489,8 @@ class QueueItemAdd(ShowQueueItem):


class QueueItemRefresh(ShowQueueItem):
def __init__(self, show=None, force=False):
ShowQueueItem.__init__(self, ShowQueueActions.REFRESH, show)
def __init__(self, show=None, force=False, scheduled_update=False):
ShowQueueItem.__init__(self, ShowQueueActions.REFRESH, show, scheduled_update)

# do refreshes first because they're quick
self.priority = generic_queue.QueuePriorities.HIGH

@@ -469,7 +501,7 @@ class QueueItemRefresh(ShowQueueItem):
def run(self):
ShowQueueItem.run(self)

logger.log(u"Performing refresh on " + self.show.name)
logger.log(u'Performing refresh on ' + self.show.name)

self.show.refreshDir()
self.show.writeMetadata()

@@ -484,19 +516,19 @@ class QueueItemRefresh(ShowQueueItem):


class QueueItemRename(ShowQueueItem):
def __init__(self, show=None):
ShowQueueItem.__init__(self, ShowQueueActions.RENAME, show)
def __init__(self, show=None, scheduled_update=False):
ShowQueueItem.__init__(self, ShowQueueActions.RENAME, show, scheduled_update)

def run(self):

ShowQueueItem.run(self)

logger.log(u"Performing rename on " + self.show.name)
logger.log(u'Performing rename on ' + self.show.name)

try:
show_loc = self.show.location
except exceptions.ShowDirNotFoundException:
logger.log(u"Can't perform rename on " + self.show.name + " when the show directory is missing.", logger.WARNING)
logger.log(u'Can\'t perform rename on ' + self.show.name + ' when the show directory is missing.', logger.WARNING)
return

ep_obj_rename_list = []

@@ -525,13 +557,13 @@ class QueueItemRename(ShowQueueItem):


class QueueItemSubtitle(ShowQueueItem):
def __init__(self, show=None):
ShowQueueItem.__init__(self, ShowQueueActions.SUBTITLE, show)
def __init__(self, show=None, scheduled_update=False):
ShowQueueItem.__init__(self, ShowQueueActions.SUBTITLE, show, scheduled_update)

def run(self):
ShowQueueItem.run(self)

logger.log(u"Downloading subtitles for " + self.show.name)
logger.log(u'Downloading subtitles for ' + self.show.name)

self.show.downloadSubtitles()


@@ -539,8 +571,8 @@ class QueueItemSubtitle(ShowQueueItem):


class QueueItemUpdate(ShowQueueItem):
def __init__(self, show=None):
ShowQueueItem.__init__(self, ShowQueueActions.UPDATE, show)
def __init__(self, show=None, scheduled_update=False):
ShowQueueItem.__init__(self, ShowQueueActions.UPDATE, show, scheduled_update)
self.force = False
self.force_web = False


@@ -548,20 +580,20 @@ class QueueItemUpdate(ShowQueueItem):

ShowQueueItem.run(self)

logger.log(u"Beginning update of " + self.show.name)
logger.log(u'Beginning update of ' + self.show.name)

logger.log(u"Retrieving show info from " + sickbeard.indexerApi(self.show.indexer).name + "", logger.DEBUG)
logger.log(u'Retrieving show info from ' + sickbeard.indexerApi(self.show.indexer).name + '', logger.DEBUG)
try:
result = self.show.loadFromIndexer(cache=not self.force)
if None is not result:
return
except sickbeard.indexer_error, e:
logger.log(u"Unable to contact " + sickbeard.indexerApi(self.show.indexer).name + ", aborting: " + ex(e),
logger.log(u'Unable to contact ' + sickbeard.indexerApi(self.show.indexer).name + ', aborting: ' + ex(e),
logger.WARNING)
return
except sickbeard.indexer_attributenotfound, e:
logger.log(u"Data retrieved from " + sickbeard.indexerApi(
self.show.indexer).name + " was incomplete, aborting: " + ex(e), logger.ERROR)
logger.log(u'Data retrieved from ' + sickbeard.indexerApi(
self.show.indexer).name + ' was incomplete, aborting: ' + ex(e), logger.ERROR)
return

if self.force_web:

@@ -570,30 +602,30 @@ class QueueItemUpdate(ShowQueueItem):
try:
self.show.saveToDB()
except Exception, e:
logger.log(u"Error saving the episode to the database: " + ex(e), logger.ERROR)
logger.log(u'Error saving the episode to the database: ' + ex(e), logger.ERROR)
logger.log(traceback.format_exc(), logger.DEBUG)

# get episode list from DB
logger.log(u"Loading all episodes from the database", logger.DEBUG)
logger.log(u'Loading all episodes from the database', logger.DEBUG)
DBEpList = self.show.loadEpisodesFromDB()

# get episode list from TVDB
logger.log(u"Loading all episodes from " + sickbeard.indexerApi(self.show.indexer).name + "", logger.DEBUG)
logger.log(u'Loading all episodes from ' + sickbeard.indexerApi(self.show.indexer).name + '', logger.DEBUG)
try:
IndexerEpList = self.show.loadEpisodesFromIndexer(cache=not self.force)
except sickbeard.indexer_exception, e:
logger.log(u"Unable to get info from " + sickbeard.indexerApi(
self.show.indexer).name + ", the show info will not be refreshed: " + ex(e), logger.ERROR)
logger.log(u'Unable to get info from ' + sickbeard.indexerApi(
self.show.indexer).name + ', the show info will not be refreshed: ' + ex(e), logger.ERROR)
IndexerEpList = None

if IndexerEpList == None:
logger.log(u"No data returned from " + sickbeard.indexerApi(
self.show.indexer).name + ", unable to update this show", logger.ERROR)
logger.log(u'No data returned from ' + sickbeard.indexerApi(
self.show.indexer).name + ', unable to update this show', logger.ERROR)
else:
# for each ep we found on TVDB delete it from the DB list
for curSeason in IndexerEpList:
for curEpisode in IndexerEpList[curSeason]:
logger.log(u"Removing " + str(curSeason) + "x" + str(curEpisode) + " from the DB list",
logger.log(u'Removing ' + str(curSeason) + 'x' + str(curEpisode) + ' from the DB list',
logger.DEBUG)
if curSeason in DBEpList and curEpisode in DBEpList[curSeason]:
del DBEpList[curSeason][curEpisode]

@@ -601,8 +633,8 @@ class QueueItemUpdate(ShowQueueItem):
# for the remaining episodes in the DB list just delete them from the DB
for curSeason in DBEpList:
for curEpisode in DBEpList[curSeason]:
logger.log(u"Permanently deleting episode " + str(curSeason) + "x" + str(
curEpisode) + " from the database", logger.MESSAGE)
logger.log(u'Permanently deleting episode ' + str(curSeason) + 'x' + str(
curEpisode) + ' from the database', logger.MESSAGE)
curEp = self.show.getEpisode(curSeason, curEpisode)
try:
curEp.deleteEpisode()

@@ -613,14 +645,14 @@ class QueueItemUpdate(ShowQueueItem):


class QueueItemForceUpdate(QueueItemUpdate):
def __init__(self, show=None):
ShowQueueItem.__init__(self, ShowQueueActions.FORCEUPDATE, show)
def __init__(self, show=None, scheduled_update=False):
ShowQueueItem.__init__(self, ShowQueueActions.FORCEUPDATE, show, scheduled_update)
self.force = True
self.force_web = False


class QueueItemForceUpdateWeb(QueueItemUpdate):
def __init__(self, show=None):
ShowQueueItem.__init__(self, ShowQueueActions.FORCEUPDATE, show)
def __init__(self, show=None, scheduled_update=False):
ShowQueueItem.__init__(self, ShowQueueActions.FORCEUPDATE, show, scheduled_update)
self.force = True
self.force_web = True
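The new Show Queue Overview page is fed by queue_length() returning per-item details (show name plus the scheduled_update flag) instead of bare counters. A reduced sketch of that shape with hypothetical item classes and fewer categories than the real queue:

import threading


class Item(object):
    def __init__(self, show_name, scheduled_update=False):
        self.show_name = show_name
        self.scheduled_update = scheduled_update


class AddItem(Item): pass
class RefreshItem(Item): pass
class UpdateItem(Item): pass


class MiniShowQueue(object):
    def __init__(self):
        self.lock = threading.Lock()
        self.queue = []
        self.currentItem = None

    def queue_length(self):
        length = {'add': [], 'update': [], 'refresh': []}
        with self.lock:
            # current item first, then everything still waiting
            for cur_item in [self.currentItem] + self.queue:
                if isinstance(cur_item, AddItem):
                    length['add'].append([cur_item.show_name, cur_item.scheduled_update])
                elif isinstance(cur_item, UpdateItem):
                    length['update'].append([cur_item.show_name, cur_item.scheduled_update])
                elif isinstance(cur_item, RefreshItem):
                    length['refresh'].append([cur_item.show_name, cur_item.scheduled_update])
        return length


q = MiniShowQueue()
q.currentItem = UpdateItem('Some Show', scheduled_update=True)
q.queue.append(RefreshItem('Another Show'))
print(q.queue_length())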
@ -289,48 +289,40 @@ class TVShow(object):
|
|||
# In some situations self.status = None.. need to figure out where that is!
|
||||
if not self.status:
|
||||
self.status = ''
|
||||
logger.log("Status missing for showid: [%s] with status: [%s]" %
|
||||
logger.log('Status missing for showid: [%s] with status: [%s]' %
|
||||
(cur_indexerid, self.status), logger.DEBUG)
|
||||
|
||||
# if show is not 'Ended' always update (status 'Continuing' or '')
|
||||
if 'Ended' not in self.status:
|
||||
return True
|
||||
|
||||
# run logic against the current show latest aired and next unaired data to see if we should bypass 'Ended' status
|
||||
|
||||
graceperiod = datetime.timedelta(days=30)
|
||||
|
||||
last_airdate = datetime.date.fromordinal(1)
|
||||
|
||||
# get latest aired episode to compare against today - graceperiod and today + graceperiod
|
||||
myDB = db.DBConnection()
|
||||
sql_result = myDB.select(
|
||||
"SELECT * FROM tv_episodes WHERE showid = ? AND season > '0' AND airdate > '1' AND status > '1' ORDER BY airdate DESC LIMIT 1",
|
||||
[cur_indexerid])
|
||||
sql_result = myDB.mass_action(
|
||||
[['SELECT airdate FROM [tv_episodes] WHERE showid = ? AND season > "0" ORDER BY season DESC, episode DESC LIMIT 1', [cur_indexerid]],
|
||||
['SELECT airdate FROM [tv_episodes] WHERE showid = ? AND season > "0" AND airdate > "1" ORDER BY airdate DESC LIMIT 1', [cur_indexerid]]])
|
||||
|
||||
if sql_result:
|
||||
last_airdate = datetime.date.fromordinal(sql_result[0]['airdate'])
|
||||
if last_airdate >= (update_date - graceperiod) and last_airdate <= (update_date + graceperiod):
|
||||
return True
|
||||
last_airdate_unknown = int(sql_result[0][0]['airdate']) <= 1 if sql_result and sql_result[0] else True
|
||||
|
||||
# get next upcoming UNAIRED episode to compare against today + graceperiod
|
||||
sql_result = myDB.select(
|
||||
"SELECT * FROM tv_episodes WHERE showid = ? AND season > '0' AND airdate > '1' AND status = '1' ORDER BY airdate ASC LIMIT 1",
|
||||
[cur_indexerid])
|
||||
|
||||
if sql_result:
|
||||
next_airdate = datetime.date.fromordinal(sql_result[0]['airdate'])
|
||||
if next_airdate <= (update_date + graceperiod):
|
||||
return True
|
||||
last_airdate = datetime.date.fromordinal(sql_result[1][0]['airdate']) if sql_result and sql_result[1] else datetime.date.fromordinal(1)
|
||||
|
||||
last_update_indexer = datetime.date.fromordinal(self.last_update_indexer)
|
||||
|
||||
# in the first year after ended (last airdate), update every 30 days
|
||||
if (update_date - last_airdate) < datetime.timedelta(days=450) and (
|
||||
update_date - last_update_indexer) > datetime.timedelta(days=30):
|
||||
# if show is not 'Ended' and last episode aired less then 460 days ago or don't have an airdate for the last episode always update (status 'Continuing' or '')
|
||||
update_days_limit = 460
|
||||
ended_limit = datetime.timedelta(days=update_days_limit)
|
||||
if 'Ended' not in self.status and (last_airdate == datetime.date.fromordinal(1) or last_airdate_unknown or (update_date - last_airdate) <= ended_limit or (update_date - last_update_indexer) > ended_limit):
|
||||
return True
|
||||
|
||||
return False
|
||||
# in the first 460 days (last airdate), update regularly
|
||||
airdate_diff = update_date - last_airdate
|
||||
last_update_diff = update_date - last_update_indexer
|
||||
|
||||
update_step_list = [[60, 1], [120, 3], [180, 7], [365, 15], [update_days_limit, 30]]
|
||||
for date_diff, interval in update_step_list:
|
||||
if airdate_diff <= datetime.timedelta(days=date_diff) and last_update_diff >= datetime.timedelta(days=interval):
|
||||
return True
|
||||
|
||||
# update shows without an airdate for the last episode for 460 days every 7 days
|
||||
if last_airdate_unknown and airdate_diff <= ended_limit and last_update_diff >= datetime.timedelta(days=7):
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
def writeShowNFO(self):
|
||||
|
||||
|
@@ -1009,22 +1001,22 @@ class TVShow(object):
        logger.log(str(self.indexerid) + u': Parsed latest IMDb show info for [%s]' % self.name)

    def nextEpisode(self):
        logger.log(str(self.indexerid) + ": Finding the episode which airs next", logger.DEBUG)
        logger.log(str(self.indexerid) + ': Finding the episode which airs next', logger.DEBUG)

        curDate = datetime.date.today().toordinal()
        if not self.nextaired or self.nextaired and curDate > self.nextaired:
            myDB = db.DBConnection()
            sqlResults = myDB.select(
                "SELECT airdate, season, episode FROM tv_episodes WHERE showid = ? AND airdate >= ? AND status in (?,?) ORDER BY airdate ASC LIMIT 1",
                [self.indexerid, datetime.date.today().toordinal(), UNAIRED, WANTED])
                'SELECT airdate, season, episode FROM tv_episodes WHERE showid = ? AND airdate >= ? AND status in (?,?,?) ORDER BY airdate ASC LIMIT 1',
                [self.indexerid, datetime.date.today().toordinal(), UNAIRED, WANTED, FAILED])

            if sqlResults == None or len(sqlResults) == 0:
                logger.log(str(self.indexerid) + u": No episode found... need to implement a show status",
                logger.log(str(self.indexerid) + u': No episode found... need to implement a show status',
                           logger.DEBUG)
                self.nextaired = ""
                self.nextaired = ''
            else:
                logger.log(str(self.indexerid) + u": Found episode " + str(sqlResults[0]["season"]) + "x" + str(
                    sqlResults[0]["episode"]), logger.DEBUG)
                logger.log(str(self.indexerid) + u': Found episode ' + str(sqlResults[0]['season']) + 'x' + str(
                    sqlResults[0]['episode']), logger.DEBUG)
                self.nextaired = sqlResults[0]['airdate']

        return self.nextaired
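
The net effect of this hunk is that episodes marked FAILED are now considered when looking up the next airing episode, alongside UNAIRED and WANTED. A small illustrative way to build that status filter without hand-counting placeholders (not how the commit writes it, which inlines the (?,?,?) literal; the constant values below are stand-ins for SickGear's own):

# Hypothetical status constants for illustration; SickGear defines its own values in common.py.
UNAIRED, WANTED, FAILED = 1, 3, 11

def next_episode_query(statuses=(UNAIRED, WANTED, FAILED)):
    placeholders = ','.join('?' * len(statuses))  # '?,?,?'
    sql = ('SELECT airdate, season, episode FROM tv_episodes '
           'WHERE showid = ? AND airdate >= ? AND status IN (%s) '
           'ORDER BY airdate ASC LIMIT 1' % placeholders)
    return sql, list(statuses)

sql, status_params = next_episode_query()
# params passed to the DB layer would be [indexer_id, today_ordinal] + status_params
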
@@ -46,6 +46,7 @@ from sickbeard.scene_numbering import get_scene_numbering, set_scene_numbering,
from sickbeard.name_cache import buildNameCache
from sickbeard.browser import foldersAtPath
from sickbeard.blackandwhitelist import BlackAndWhiteList, short_group_names
from sickbeard.searchBacklog import FULL_BACKLOG, LIMITED_BACKLOG
from tornado import gen
from tornado.web import RequestHandler, authenticated
from lib import adba
@@ -2516,6 +2517,7 @@ class Manage(MainHandler):
        manageMenu = [
            {'title': 'Backlog Overview', 'path': 'manage/backlogOverview/'},
            {'title': 'Manage Searches', 'path': 'manage/manageSearches/'},
            {'title': 'Show Queue Overview', 'path': 'manage/showQueueOverview/'},
            {'title': 'Episode Status Management', 'path': 'manage/episodeStatuses/'}, ]

        if sickbeard.USE_TORRENTS and sickbeard.TORRENT_METHOD != 'blackhole' \
@@ -3223,8 +3225,9 @@ class ManageSearches(Manage):
        # t.backlogPI = sickbeard.backlogSearchScheduler.action.getProgressIndicator()
        t.backlogPaused = sickbeard.searchQueueScheduler.action.is_backlog_paused()
        t.backlogRunning = sickbeard.searchQueueScheduler.action.is_backlog_in_progress()
        t.backlogRunningType = sickbeard.searchQueueScheduler.action.type_of_backlog_in_progress()
        t.recentSearchStatus = sickbeard.searchQueueScheduler.action.is_recentsearch_in_progress()
        t.findPropersStatus = sickbeard.properFinderScheduler.action.amActive
        t.findPropersStatus = sickbeard.searchQueueScheduler.action.is_propersearch_in_progress()
        t.queueLength = sickbeard.searchQueueScheduler.action.queue_length()

        t.submenu = self.ManageMenu()
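
Reporting the propers status from the search queue rather than the properFinder scheduler's amActive flag keeps all of the page's indicators backed by the same source. A rough sketch of what such an "in progress" probe over a queue can look like (the class and attribute names here are assumptions for illustration, not the actual search_queue API):

import threading

class ProperSearchItem(object):
    """Stand-in for a queued propers-search task."""
    pass

class SimpleSearchQueue(object):
    def __init__(self):
        self.lock = threading.Lock()
        self.queue = []            # pending items
        self.current_item = None   # item being processed right now

    def is_propersearch_in_progress(self):
        # a search counts as "in progress" whether it is still queued or already running
        with self.lock:
            items = self.queue + ([self.current_item] if self.current_item else [])
            return any(isinstance(item, ProperSearchItem) for item in items)

q = SimpleSearchQueue()
q.queue.append(ProperSearchItem())
print(q.is_propersearch_in_progress())  # True
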
@@ -3238,23 +3241,36 @@ class ManageSearches(Manage):

        self.redirect('/home/')

    def forceBacklog(self, *args, **kwargs):
    def forceLimitedBacklog(self, *args, **kwargs):
        # force it to run the next time it looks
        result = sickbeard.backlogSearchScheduler.forceRun()
        if result:
            logger.log(u'Backlog search forced')
            ui.notifications.message('Backlog search started')
        if not sickbeard.searchQueueScheduler.action.is_standard_backlog_in_progress():
            sickbeard.backlogSearchScheduler.forceSearch(force_type=LIMITED_BACKLOG)
            logger.log(u'Limited Backlog search forced')
            ui.notifications.message('Limited Backlog search started')

        time.sleep(5)
        self.redirect('/manage/manageSearches/')

    def forceFullBacklog(self, *args, **kwargs):
        # force it to run the next time it looks
        if not sickbeard.searchQueueScheduler.action.is_standard_backlog_in_progress():
            sickbeard.backlogSearchScheduler.forceSearch(force_type=FULL_BACKLOG)
            logger.log(u'Full Backlog search forced')
            ui.notifications.message('Full Backlog search started')

        time.sleep(5)
        self.redirect('/manage/manageSearches/')

    def forceSearch(self, *args, **kwargs):

        # force it to run the next time it looks
        result = sickbeard.recentSearchScheduler.forceRun()
        if result:
            logger.log(u'Recent search forced')
            ui.notifications.message('Recent search started')
        if not sickbeard.searchQueueScheduler.action.is_recentsearch_in_progress():
            result = sickbeard.recentSearchScheduler.forceRun()
            if result:
                logger.log(u'Recent search forced')
                ui.notifications.message('Recent search started')

        time.sleep(5)
        self.redirect('/manage/manageSearches/')

    def forceFindPropers(self, *args, **kwargs):
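
Both new handlers follow the same pattern: skip the force entirely if an equivalent search is already queued or running, otherwise pass a force_type so the backlog scheduler knows whether to run a limited or a full pass. A condensed sketch of that pattern under stated assumptions (the scheduler and queue objects are stand-ins, and the constant values are illustrative; the real ones live in searchBacklog and search_queue):

import logging

FULL_BACKLOG, LIMITED_BACKLOG = 20, 10  # illustrative values only

def force_backlog(scheduler, queue, force_type=LIMITED_BACKLOG):
    """Queue a forced backlog search unless one is already pending or running."""
    if queue.is_standard_backlog_in_progress():
        logging.info('Backlog search already in progress, not forcing another')
        return False
    scheduler.forceSearch(force_type=force_type)
    label = 'Full' if FULL_BACKLOG == force_type else 'Limited'
    logging.info('%s Backlog search forced', label)
    return True
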
@@ -3265,6 +3281,7 @@ class ManageSearches(Manage):
            logger.log(u'Find propers search forced')
            ui.notifications.message('Find propers search started')

        time.sleep(5)
        self.redirect('/manage/manageSearches/')

    def pauseBacklog(self, paused=None):
@@ -3273,8 +3290,29 @@ class ManageSearches(Manage):
        else:
            sickbeard.searchQueueScheduler.action.unpause_backlog()  # @UndefinedVariable

        time.sleep(5)
        self.redirect('/manage/manageSearches/')


class showQueueOverview(Manage):
    def index(self, *args, **kwargs):
        t = PageTemplate(headers=self.request.headers, file='manage_showQueueOverview.tmpl')
        t.queueLength = sickbeard.showQueueScheduler.action.queue_length()
        t.showList = sickbeard.showList
        t.ShowUpdateRunning = sickbeard.showQueueScheduler.action.isShowUpdateRunning()

        t.submenu = self.ManageMenu()

        return t.respond()

    def forceShowUpdate(self, *args, **kwargs):

        result = sickbeard.showUpdateScheduler.forceRun()
        if result:
            logger.log(u'Show Update forced')
            ui.notifications.message('Forced Show Update started')

        time.sleep(5)
        self.redirect('/manage/showQueueOverview/')


class History(MainHandler):
    def index(self, limit=100):
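
The new overview handler mostly hands the template raw queue state: a queue-length summary, the show list, and whether a global show update is running. A sketch of the kind of per-action summary a queue_length()-style helper can return for such a page (the item action tags and dict keys below are assumptions for illustration, not the actual show_queue return shape):

from collections import OrderedDict

# Illustrative action tags for queued show items
ADD, REFRESH, UPDATE, RENAME, SUBTITLE = 'add', 'refresh', 'update', 'rename', 'subtitle'

def queue_length(queued_items):
    """Count pending show-queue items per action so a template can render one row per type."""
    summary = OrderedDict((action, 0) for action in (ADD, REFRESH, UPDATE, RENAME, SUBTITLE))
    for item in queued_items:
        action = getattr(item, 'action', None)
        if action in summary:
            summary[action] += 1
    return summary

class FakeItem(object):
    def __init__(self, action):
        self.action = action

print(queue_length([FakeItem(UPDATE), FakeItem(UPDATE), FakeItem(ADD)]))
# OrderedDict([('add', 1), ('refresh', 0), ('update', 2), ('rename', 0), ('subtitle', 0)])
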
@@ -83,6 +83,7 @@ class WebServer(threading.Thread):
            (r'%s/home/postprocess(/?.*)' % self.options['web_root'], webserve.HomePostProcess),
            (r'%s/home(/?.*)' % self.options['web_root'], webserve.Home),
            (r'%s/manage/manageSearches(/?.*)' % self.options['web_root'], webserve.ManageSearches),
            (r'%s/manage/showQueueOverview(/?.*)' % self.options['web_root'], webserve.showQueueOverview),
            (r'%s/manage/(/?.*)' % self.options['web_root'], webserve.Manage),
            (r'%s/ui(/?.*)' % self.options['web_root'], webserve.UI),
            (r'%s/browser(/?.*)' % self.options['web_root'], webserve.WebFileBrowser),
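
Each tuple pairs a URL regex, prefixed with the configured web_root, with the handler class that serves it; the optional trailing group lets the same handler receive sub-paths such as forceShowUpdate. A quick check of how the new pattern resolves (the web_root value here is just an example, not taken from the commit):

import re

web_root = ''  # e.g. '/sickgear' when the app runs behind a path prefix
pattern = r'%s/manage/showQueueOverview(/?.*)' % web_root

print(bool(re.match(pattern, '/manage/showQueueOverview/')))                     # True - base path reaches the handler
print(re.match(pattern, '/manage/showQueueOverview/forceShowUpdate/').group(1))  # '/forceShowUpdate/' captured for dispatch
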