Merge pull request #772 from JackDandy/feature/ReduceNewznabHits
Add smart logic to reduce api hits to newznab server types and improve how nzbs are downloaded
CHANGES.md
@@ -129,6 +129,34 @@
* Add indicator for public access search providers
* Change improve probability selecting most seeded release
* Change add the TorrentDay x265 category to search
* Add smart logic to reduce api hits to newznab server types and improve how nzbs are downloaded
* Add newznab smart logic to avoid missing releases when there are a great many recent releases
* Change improve performance by using newznab server advertised capabilities
* Change config/providers newznab to display only non-default categories
* Change use scene season for wanted segment in backlog if show is scene numbering
* Change combine Manage Searches / Backlog Search / Limited and Full to Force
* Change consolidate limited and full backlog
* Change config / Search / Backlog search frequency to instead spread backlog searches over a number of days
* Change migrate minimum used value for search frequency into new minimum 7 for search spread
* Change restrict nzb providers to 1 backlog batch run per day
* Add to Config/Search/Unaired episodes/Allow episodes that are released early
* Add to Config/Search/Unaired episodes/Use specific api requests to search for early episode releases
* Add use related ids for newznab searches to increase search efficiency
* Add periodic update of related show ids
* Change terminology Edit Show/"Post processing" tab name to "Other"
* Add advanced feature "Related show IDs" to Edit Show/Other used for finding episodes and TV info
* Add search info source image links to those that have zero id under Edit Show/Other/"Related show IDs"
* Add "set master" button to Edit Show/Other/"Related show IDs" for info source that can be changed
* Change displayShow terminology "Indexers" to "Links" to cover internal and web links
* Change add related show info sources on displayShow page
* Change don't display "temporarily" defunct TVRage image link on displayShow pages unless it is master info source
* Change if a defunct info source is the master of a show then present a link on displayShow to edit related show IDs
* Change simplify the next backlog search run time display in the page footer
* Change try ssl when fetching data thetvdb, imdb, trakt, scene exception
* Change improve reliability to Trakt notifier by using show related id support
* Change improve config/providers newznab categories layout
* Change show loaded log message at start up and include info source
* Change if episode has no airdate then set status to unaired (was skipped)

[develop changelog]
* Change send nzb data to NZBGet for Anizb instead of url

@@ -4,6 +4,7 @@ Libs with customisations...
/lib/dateutil/zoneinfo/__init__.py
/lib/hachoir_core/config.py
/lib/hachoir_core/stream/input_helpers.py
/lib/lockfile/mkdirlockfile.py
/lib/pynma/pynma.py
/lib/requests/packages/urllib3/connectionpool.py
/lib/requests/packages/urllib3/util/ssl_.py

Binary images in gui/slick/images/:
  fanart.png   - new file, 436 B
  imdb16.png   - new file, 322 B
  (an existing image, updated: 172 B before, 323 B after)
  tmdb16.png   - new file, 263 B
  trakt16.png  - new file, 667 B
  tvmaze16.png - new file, 900 B

@@ -645,15 +645,27 @@ name = '' if not client else get_client_instance(sickbeard.TORRENT_METHOD)().name

<div class="field-pair">
<label>
<span class="component-title">Search categories</span>
<span class="component-title">Extra categories to search
<span style="font-weight:normal">with.. (u)hd, sd, sport, anime</span>
</span>
<span class="component-desc">
<select id="newznab_cap" multiple="multiple" style="min-width:10em;" ></select>
<select id="newznab_cat" multiple="multiple" style="min-width:10em;" ></select>
<div class="clear-left">
<p>select newznab categories to search on the left then "Update Categories"<br />
<b>remember</b> to "Save Changes"!</p>
<div id="nn-cats">
<div class="pull-left">
<select class="pull-left" id="newznab_cap" multiple="multiple" style="min-width:10em;min-height:72px"></select>
<input class="btn" type="button" class="newznab_cat_update" id="newznab_cat_update" value=">>" style="position:relative;bottom:-15px">
</div>
<select id="newznab_cat" multiple="multiple" style="min-width:7em;min-height:72px"></select>
<div class="clear-left">
<p>multi-select newznab categories on the left<br />
then click ">>" and finally "Save Changes"</p>
</div>
</div>
<div id="nn-nocats" class="hide">
<span>No extra categories found</span>
</div>
<div id="nn-loadcats" class="hide">
<span>Loading categories...</span>
</div>
<div class="clear-left"><input class="btn" type="button" class="newznab_cat_update" id="newznab_cat_update" value="Update Categories" /></div>
</span>
</label>
</div>

@@ -94,10 +94,10 @@

<div class="field-pair">
<label>
<span class="component-title">Backlog search frequency</span>
<span class="component-title">Backlog search spread</span>
<span class="component-desc">
<input type="text" name="backlog_frequency" value="$sickbeard.BACKLOG_FREQUENCY" class="form-control input-sm input75">
<p>days between full backlog searches (min $sickbeard.MIN_BACKLOG_FREQUENCY, default $sickbeard.DEFAULT_BACKLOG_FREQUENCY, max $sickbeard.MAX_BACKLOG_FREQUENCY)</p>
<p>days to spread full backlog searches over (min $sickbeard.MIN_BACKLOG_FREQUENCY, default $sickbeard.DEFAULT_BACKLOG_FREQUENCY, max $sickbeard.MAX_BACKLOG_FREQUENCY)</p>
</span>
</label>
</div>
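
Note: the renamed setting also changes meaning: rather than waiting N days between full backlog runs, the full backlog is now spread across N days. A minimal sketch of the idea in Python (illustrative only, not the project's actual helper; the backlogparts table created in the cache_db migration at the end of this diff suggests the daily parts are persisted):

# Hedged sketch: split the show list into BACKLOG_FREQUENCY parts so each
# daily backlog run searches one part instead of every show at once.
def spread_backlog(shows, spread_days=7):
    parts = [[] for _ in range(spread_days)]
    for n, show in enumerate(shows):
        parts[n % spread_days].append(show)
    return parts

# Example: 100 shows at the new minimum spread of 7 days gives daily parts
# of 14-15 shows, roughly a seventh of the previous per-run api hits.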

@@ -171,10 +171,20 @@

<span class="component-title">Unaired episodes</span>
<span class="component-desc">
<input type="checkbox" name="search_unaired" id="search_unaired" class="enabler"<%= html_checked if sickbeard.SEARCH_UNAIRED == True else '' %>>
<p>process episodes found before their expected airdate (disable if getting fakes)</p>
<p>allow episodes that are released early (disable if getting fakes)</p>
</span>
</label>
</div>
<div id="content_search_unaired">
<div class="field-pair" style="margin-top:-24px">
<label for="unaired_recent_search_only">
<span class="component-desc">
<input type="checkbox" name="unaired_recent_search_only" id="unaired_recent_search_only" class="enabler"<%= html_checked if sickbeard.UNAIRED_RECENT_SEARCH_ONLY == False else '' %>>
<p>use specific server api requests to search for early episode releases</p>
</span>
</label>
</div>
</div>

<input type="submit" class="btn config_submitter" value="Save Changes">

@@ -6,6 +6,7 @@

#from sickbeard.common import *
#from sickbeard.helpers import anon_url
#from lib import subliminal
#from sickbeard.indexers.indexer_config import INDEXER_TVDB, INDEXER_IMDB
##
#set global $title = $show.name
#set global $topmenu = 'home'

@@ -45,7 +46,7 @@

$('.addQTip').each(function () {
$(this).css({'cursor':'help', 'text-shadow':'0px 0px 0.5px #666'});
$(this).qtip({
show: {solo:true},
show: {solo:!0},
position: {viewport:$(window), my:'left center', adjust:{ y: -10, x: 2 }},
style: {classes:'qtip-rounded qtip-shadow qtip-maxwidth'}
});

@@ -182,15 +183,34 @@

<div id="details-wrapper">
<div id="details-right">
<div>
<span class="details-title">Indexers</span>
<span class="details-title">Links</span>
<span class="details-info">
#set $_show = $show
#if $sickbeard.USE_IMDB_INFO and $show.imdbid
<a class="service" href="<%= anon_url('http://www.imdb.com/title/', _show.imdbid) %>" rel="noreferrer" onclick="window.open(this.href, '_blank'); return false;" title="Show IMDb info in new tab"><img alt="[imdb]" height="16" width="16" src="$sbRoot/images/imdb.png" /></a>
#end if
<a class="service" href="<%= anon_url(sickbeard.indexerApi(_show.indexer).config['show_url'], _show.indexerid) %>" onclick="window.open(this.href, '_blank'); return false;" title="Show $sickbeard.indexerApi($show.indexer).name info in new tab"><img alt="$sickbeard.indexerApi($show.indexer).name" height="16" width="16" src="$sbRoot/images/$sickbeard.indexerApi($show.indexer).config['icon']" /></a>
#set $tvdb_id = None
#for $src_id, $src_name in $sickbeard.indexerApi().all_indexers.iteritems()
#if sickbeard.indexerApi($src_id).config.get('defunct') and $src_id != $show.indexer
#continue
#end if
#if $src_id in $show.ids and $show.ids[$src_id].get('id', 0) > 0 and $sickbeard.indexermapper.MapStatus.NOT_FOUND != $show.ids[$src_id]['status']
#if $INDEXER_TVDB == $src_id
#set $tvdb_id = $show.ids[$src_id]['id']
#end if
#if $INDEXER_IMDB == $src_id and not $sickbeard.USE_IMDB_INFO
#continue
#end if
#if not sickbeard.indexerApi($src_id).config.get('defunct')
<a class="service" href="$anon_url(sickbeard.indexerApi($src_id).config['show_url'], $show.ids[$src_id]['id'])" onclick="window.open(this.href, '_blank'); return !1;" title="View $src_name info in new tab">
#else#
<a class="service" href="$sbRoot/home/editShow?show=$show.indexerid#core-component-group3" title="Edit related show IDs">
#end if#
<img alt="$src_name" height="16" width="16" src="$sbRoot/images/$sickbeard.indexerApi($src_id).config['icon']" />
</a>
#end if
#end for
##if $tvdb_id
## <a class="service" href="$anon_url('https://fanart.tv/series/', $tvdb_id)" onclick="window.open(this.href, '_blank'); return !1;" title="View Fanart.tv info in new tab"><img alt="Fanart.tv" height="16" width="16" src="$sbRoot/images/fanart.png" /></a>
##end if
#if $xem_numbering or $xem_absolute_numbering
<a class="service" href="<%= anon_url('http://thexem.de/search?q=', _show.name) %>" rel="noreferrer" onclick="window.open(this.href, '_blank'); return false;" title="Show XEM info in new tab"><img alt="[xem]" height="16" width="16" src="$sbRoot/images/xem.png" /></a>
<a class="service" href="$anon_url('http://thexem.de/search?q=', $show.name)" rel="noreferrer" onclick="window.open(this.href, '_blank'); return !1;" title="View XEM info in new tab"><img alt="[xem]" height="16" width="16" src="$sbRoot/images/xem.png" /></a>
#end if
</span>
</div>

@@ -399,7 +419,7 @@

#if 0 == len($sqlResults)
<div style="margin-top:50px">
<h3>Episodes do not exist for this show at the associated indexer
<a class="service" href="<%= anon_url(sickbeard.indexerApi(_show.indexer).config['show_url'], _show.indexerid) %>" onclick="window.open(this.href, '_blank'); return false;" title="Show $sickbeard.indexerApi($show.indexer).name info in new tab">$sickbeard.indexerApi($show.indexer).name</a>
<a class="service" href="$anon_url(sickbeard.indexerApi($show.indexer).config['show_url'], $show.indexerid)" onclick="window.open(this.href, '_blank'); return !1;" title="View $sickbeard.indexerApi($show.indexer).name info in new tab">$sickbeard.indexerApi($show.indexer).name</a>
</h3>
</div>
#else:

@@ -3,6 +3,8 @@

#from sickbeard import common
#from sickbeard import exceptions
#from sickbeard import scene_exceptions
#from sickbeard.helpers import anon_url
#from sickbeard.indexers.indexer_config import INDEXER_TVDB
#import sickbeard.blackandwhitelist
##
#set global $title = 'Edit ' + $show.name

@@ -12,6 +14,7 @@

#set global $page_body_attr = 'edit-show'
##
#import os.path
#from urllib import quote_plus
#include $os.path.join($sickbeard.PROG_DIR, 'gui/slick/interfaces/default/inc_top.tmpl')

<script type="text/javascript" src="$sbRoot/js/qualityChooser.js?v=$sbPID"></script>

@@ -31,13 +34,14 @@

<div id="config">
<div id="config-content" class="linefix container">
<form action="editShow" method="post" id="editShow" style="width:894px">
<input type="hidden" name="show" value="$show.indexerid">
<input type="hidden" name="show" id="show" value="$show.indexerid">
<input type="hidden" name="indexer" id="indexer" value="$show.indexer">

<div id="config-components">
<ul>
<li><a href="#core-component-group1">Common</a></li>
<li><a href="#core-component-group2">Search</a></li>
<li><a href="#core-component-group3">Post-Processing</a></li>
<li><a href="#core-component-group3">Other</a></li>
</ul>

<div id="core-component-group1" class="component-group">

@@ -245,6 +249,89 @@

</label>
</div>

<div class="field-pair">
<label for="idmapping">
<span class="component-title">Related show IDs</span>
<span class="component-desc">
<label for="idmapping">
<input id="idmapping" type="checkbox">
<span>TV info source IDs (advanced use only)</span>
</label>
</span>
</label>
</div>

#set $dev = True
#set $dev = None
<div id="idmapping-options" style="display:#if $dev#--#end if#none">
#set $is_master_settable = False
#for $src_id, $src_name in $sickbeard.indexerApi().all_indexers.iteritems()
#set $is_master_settable |= ($dev or
($src_id != $show.indexer and $show.ids[$src_id].get('id', 0) > 0 and
$src_id in $sickbeard.indexerApi().indexers and not $sickbeard.indexerApi($src_id).config.get('defunct') and
$sickbeard.indexerApi($src_id).config.get('active')))
#if $is_master_settable
#break
#end if
#end for
#set $search_name = quote_plus($sickbeard.indexermapper.clean_show_name($show.name))
#for $src_id, $src_name in $sickbeard.indexerApi().all_indexers.iteritems()
#set $ok_src_id = $show.ids[$src_id].get('id', 0) > 0
#set $maybe_master = ($src_id != $show.indexer and
$src_id in $sickbeard.indexerApi().indexers and not $sickbeard.indexerApi($src_id).config.get('defunct') and
$sickbeard.indexerApi($src_id).config.get('active'))
#set $settable_master = ($dev or ($ok_src_id and $maybe_master))
<div class="field-pair" style="padding:0">
<span class="component-title">
#if $src_id in $show.ids
#set $src_search_url = sickbeard.indexerApi($src_id).config.get('finder')
#set $use_search_url = $src_search_url
#set $data_link = 'data-'
#if $ok_src_id and $sickbeard.indexermapper.MapStatus.NOT_FOUND != $show.ids[$src_id]['status']
#set $data_link = ''
#set $use_search_url = False
#end if
<a id="src-mid-$src_id" class="service" style="margin-right:6px" data-search="#if $use_search_url#y#else#n#end if#" #if $src_search_url#data-search-href="$anon_url($src_search_url % $search_name)" data-search-onclick="window.open(this.href, '_blank'); return !1;" data-search-title="Search for show at $src_name" #end if##if $use_search_url#href="$anon_url($src_search_url % $search_name)" onclick="window.open(this.href, '_blank'); return !1;" title="Search for show at $src_name" #end if#$(data_link)href="$anon_url(sickbeard.indexerApi($src_id).config['show_url'], $show.ids[$src_id]['id'])" $(data_link)onclick="window.open(this.href, '_blank'); return !1;" $(data_link)title="View $src_name info in new tab"><img alt="$src_name" height="16" width="16" src="$sbRoot/images/$sickbeard.indexerApi($src_id).config['icon']" /></a>
#end if
$src_name
</span>
<span class="component-desc">
<input type="text" data-maybe-master="#echo ('0', '1')[bool($maybe_master)]#" name="mid-$src_id" id="#echo ('mid-%s' % $src_id, 'source-id')[$src_id == $show.indexer]#" value="$show.ids.get($src_id, {'id': 0}).get('id')" class="form-control form-control-inline input-sm" #echo ('', $html_disabled)[$src_id == $show.indexer]#>
#if $src_id != $show.indexer
<label for="lockid-$src_id">
<input type="checkbox" name="lockid-$src_id" id="lockid-$src_id"#echo ('', $html_checked)[$show.ids.get($src_id, {'status': $sickbeard.indexermapper.MapStatus.NONE}).get('status') == $sickbeard.indexermapper.MapStatus.NO_AUTOMATIC_CHANGE]#>
<p style="padding-left:19px;margin-right:24px">lock this ID</p>
</label>
#if $settable_master
<label for="set-master-$src_id">
<input type="radio" name="set-master" id="set-master-$src_id" data-indexer="$src_id" data-indexerid="$show.ids[$src_id].get('id', 0)">set master
</label>
#end if
#else
<label for="the-master">
#if $is_master_settable
<input type="radio" name="set-master" id="the-master" checked>
#end if
<p#if $is_master_settable# style="padding-left:19px"#end if#>locked master, can't be edited</p>
</label>
#end if
</span>
</div>
#end for
<div class="field-pair" style="padding-top:0">
<span id="panel-save-get" class="component-desc show">
<p>invalid values can break finding episode and TV info</p>
<input type="button" value="Save Changes" id="save-mapping" class="btn btn-inline">
<p style="float:left;margin-right:6px">or</p>
<input type="button" value="Get Defaults" id="reset-mapping" class="btn btn-inline">
<p>for unlocked IDs</p>
</span>
<span id="save-wait" class="component-desc hide">
<span><img src="$sbRoot/images/loading16#echo ('', '-dark')['dark' == $sickbeard.THEME_NAME]#.gif" style="margin-right:6px">Saving...</span>
</span>
</div>
</div>

</div><!-- /component-group3 //-->

</div>

@@ -50,6 +50,16 @@

#except NotFound
#set $localheader = ''
#end try
<%
try:
    next_backlog_timeleft = str(sickbeard.backlogSearchScheduler.next_backlog_timeleft()).split('.')[0]
except AttributeError:
    next_backlog_timeleft = 'soon'
try:
    recent_search_timeleft = str(sickbeard.recentSearchScheduler.timeLeft()).split('.')[0]
except AttributeError:
    recent_search_timeleft = 'soon'
%>
##
<span class="footerhighlight">$shows_total</span> shows (<span class="footerhighlight">$shows_active</span> active)
| <span class="footerhighlight">$ep_downloaded</span><%=
|
@@ -60,10 +70,8 @@

% (localRoot, str(ep_snatched))
)[0 < ep_snatched]
%> / <span class="footerhighlight">$ep_total</span> episodes downloaded $ep_percentage
| recent search: <span class="footerhighlight"><%= str(sickbeard.recentSearchScheduler.timeLeft()).split('.')[0] %></span>
| backlog search: <span class="footerhighlight"><%= str(sickbeard.backlogSearchScheduler.timeLeft()).split('.')[0] %></span>
| full backlog: <span class="footerhighlight"><%= sbdatetime.sbdatetime.sbfdate(sickbeard.backlogSearchScheduler.nextRun()) %>
</span>
| recent search: <span class="footerhighlight">$recent_search_timeleft</span>
| backlog search: <span class="footerhighlight">$next_backlog_timeleft</span>
</div>
</footer>
</body>
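
Note: the footer can render while schedulers are still being constructed, so the template above now guards each lookup and falls back to 'soon'. The same defensive pattern in plain Python (a sketch; the scheduler object and method names mirror the template):

def timeleft_or_soon(sched, method_name):
    # During startup the scheduler global can be None or missing the method,
    # so AttributeError means "not known yet" rather than a hard error.
    try:
        return str(getattr(sched, method_name)()).split('.')[0]  # drop microseconds
    except AttributeError:
        return 'soon'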

@@ -19,14 +19,13 @@

<div id="summary2" class="align-left">
<h3>Backlog Search:</h3>
<a id="forcebacklog" class="btn#if $standardBacklogRunning# disabled#end if#" href="$sbRoot/manage/manageSearches/forceLimitedBacklog"><i class="sgicon-play"></i> Force Limited</a>
<a id="forcefullbacklog" class="btn#if $standardBacklogRunning# disabled#end if#" href="$sbRoot/manage/manageSearches/forceFullBacklog"><i class="sgicon-play"></i> Force Full</a>
<a id="forcebacklog" class="btn#if $standardBacklogRunning or $backlogIsActive# disabled#end if#" href="$sbRoot/manage/manageSearches/forceBacklog"><i class="sgicon-play"></i> Force</a>
<a id="pausebacklog" class="btn" href="$sbRoot/manage/manageSearches/pauseBacklog?paused=#if $backlogPaused then "0" else "1"#"><i class="#if $backlogPaused then "sgicon-play" else "sgicon-pause"#"></i> #if $backlogPaused then "Unpause" else "Pause"#</a>
#if $backlogPaused then 'Paused: ' else ''#
#if not $backlogRunning:
#if not $backlogRunning and not $backlogIsActive:
Not in progress<br />
#else
Currently running ($backlogRunningType)<br />
Currently running#if $backlogRunningType != "None"# ($backlogRunningType)#end if#<br />
#end if
<br />

@@ -68,13 +67,16 @@ Backlog: <i>$len($queueLength['backlog']) item$sickbeard.helpers.maybe_plural($l

#set $row = 0
#for $cur_item in $queueLength['backlog']:
#set $search_type = 'On Demand'
#if $cur_item[3]:
#if $cur_item[5]:
#if $cur_item['standard_backlog']:
#if $cur_item['forced']:
#set $search_type = 'Forced'
#else
#set $search_type = 'Scheduled'
#end if
#if $cur_item[4]:
#if $cur_item['torrent_only']:
#set $search_type += ', Torrent Only'
#end if
#if $cur_item['limited_backlog']:
#set $search_type += ' (Limited)'
#else
#set $search_type += ' (Full)'

@@ -82,7 +84,7 @@ Backlog: <i>$len($queueLength['backlog']) item$sickbeard.helpers.maybe_plural($l

#end if
<tr class="#echo ('odd', 'even')[$row % 2]##set $row+=1#">
<td style="width:80%;text-align:left;color:white">
<a class="whitelink" href="$sbRoot/home/displayShow?show=$cur_item[0]">$cur_item[1]</a> - $sickbeard.helpers.make_search_segment_html_string($cur_item[2])
<a class="whitelink" href="$sbRoot/home/displayShow?show=$cur_item['indexerid']">$cur_item['name']</a> - $sickbeard.helpers.make_search_segment_html_string($cur_item['segment'])
</td>
<td style="width:20%;text-align:center;color:white">$search_type</td>
</tr>

@@ -103,7 +105,7 @@ Manual: <i>$len($queueLength['manual']) item$sickbeard.helpers.maybe_plural($len

#for $cur_item in $queueLength['manual']:
<tr class="#echo ('odd', 'even')[$row % 2]##set $row+=1#">
<td style="width:100%;text-align:left;color:white">
<a class="whitelink" href="$sbRoot/home/displayShow?show=$cur_item[0]">$cur_item[1]</a> - $sickbeard.helpers.make_search_segment_html_string($cur_item[2])
<a class="whitelink" href="$sbRoot/home/displayShow?show=$cur_item['indexerid']">$cur_item['name']</a> - $sickbeard.helpers.make_search_segment_html_string($cur_item['segment'])
</td>
</tr>
#end for

@@ -123,7 +125,7 @@ Failed: <i>$len($queueLength['failed']) item$sickbeard.helpers.maybe_plural($len

#for $cur_item in $queueLength['failed']:
<tr class="#echo ('odd', 'even')[$row % 2]##set $row+=1#">
<td style="width:100%;text-align:left;color:white">
<a class="whitelink" href="$sbRoot/home/displayShow?show=$cur_item[0]">$cur_item[1]</a> - $sickbeard.helpers.make_search_segment_html_string($cur_item[2])
<a class="whitelink" href="$sbRoot/home/displayShow?show=$cur_item['indexerid']">$cur_item['name']</a> - $sickbeard.helpers.make_search_segment_html_string($cur_item['segment'])
</td>
</tr>
#end for

@@ -40,10 +40,10 @@ Add: <i>$len($queueLength['add']) show$sickbeard.helpers.maybe_plural($len($queu

<tbody>
#set $row = 0
#for $cur_show in $queueLength['add']:
#set $show_name = str($cur_show[0])
#set $show_name = str($cur_show['name'])
<tr class="#echo ('odd', 'even')[$row % 2]##set $row+=1#">
<td style="width:80%;text-align:left;color:white">$show_name</td>
<td style="width:20%;text-align:center;color:white">#if $cur_show[1]#Scheduled#end if#</td>
<td style="width:20%;text-align:center;color:white">#if $cur_show['scheduled_update']#Scheduled#end if#</td>
</tr>
#end for
</tbody>

@@ -60,13 +60,13 @@ Update <span class="grey-text">(Forced / Forced Web)</span>: <i>$len($queueLengt

<tbody>
#set $row = 0
#for $cur_show in $queueLength['update']:
#set $show = $findCertainShow($showList, $cur_show[0])
#set $show_name = $show.name if $show else str($cur_show[0])
#set $show = $findCertainShow($showList, $cur_show['indexerid'])
#set $show_name = $show.name if $show else str($cur_show['name'])
<tr class="#echo ('odd', 'even')[$row % 2]##set $row+=1#">
<td style="width:80%;text-align:left">
<a class="whitelink" href="$sbRoot/home/displayShow?show=$cur_show[0]">$show_name</a>
<a class="whitelink" href="$sbRoot/home/displayShow?show=$cur_show['indexerid']">$show_name</a>
</td>
<td style="width:20%;text-align:center;color:white">#if $cur_show[1]#Scheduled, #end if#$cur_show[2]</td>
<td style="width:20%;text-align:center;color:white">#if $cur_show['scheduled_update']#Scheduled, #end if#$cur_show['update_type']</td>
</tr>
#end for
</tbody>

@@ -83,13 +83,13 @@ Refresh: <i>$len($queueLength['refresh']) show$sickbeard.helpers.maybe_plural($l

<tbody>
#set $row = 0
#for $cur_show in $queueLength['refresh']:
#set $show = $findCertainShow($showList, $cur_show[0])
#set $show_name = $show.name if $show else str($cur_show[0])
#set $show = $findCertainShow($showList, $cur_show['indexerid'])
#set $show_name = $show.name if $show else str($cur_show['name'])
<tr class="#echo ('odd', 'even')[$row % 2]##set $row+=1#">
<td style="width:80%;text-align:left">
<a class="whitelink" href="$sbRoot/home/displayShow?show=$cur_show[0]">$show_name</a>
<a class="whitelink" href="$sbRoot/home/displayShow?show=$cur_show['indexerid']">$show_name</a>
</td>
<td style="width:20%;text-align:center;color:white">#if $cur_show[1]#Scheduled#end if#</td>
<td style="width:20%;text-align:center;color:white">#if $cur_show['scheduled_update']#Scheduled#end if#</td>
</tr>
#end for
</tbody>

@@ -107,13 +107,13 @@ Rename: <i>$len($queueLength['rename']) show$sickbeard.helpers.maybe_plural($len

<tbody>
#set $row = 0
#for $cur_show in $queueLength['rename']:
#set $show = $findCertainShow($showList, $cur_show[0])
#set $show_name = $show.name if $show else str($cur_show[0])
#set $show = $findCertainShow($showList, $cur_show['indexerid'])
#set $show_name = $show.name if $show else str($cur_show['name'])
<tr class="#echo ('odd', 'even')[$row % 2]##set $row+=1#">
<td style="width:80%;text-align:left">
<a class="whitelink" href="$sbRoot/home/displayShow?show=$cur_show[0]">$show_name</a>
<a class="whitelink" href="$sbRoot/home/displayShow?show=$cur_show['indexerid']">$show_name</a>
</td>
<td style="width:20%;text-align:center;color:white">#if $cur_show[1]#Scheduled#end if#</td>
<td style="width:20%;text-align:center;color:white">#if $cur_show['scheduled_update']#Scheduled#end if#</td>
</tr>
#end for
</tbody>

@@ -131,13 +131,13 @@ Rename: <i>$len($queueLength['rename']) show$sickbeard.helpers.maybe_plural($len

<tbody>
#set $row = 0
#for $cur_show in $queueLength['subtitle']:
#set $show = $findCertainShow($showList, $cur_show[0])
#set $show_name = $show.name if $show else str($cur_show[0])
#set $show = $findCertainShow($showList, $cur_show['indexerid'])
#set $show_name = $show.name if $show else str($cur_show['name'])
<tr class="#echo ('odd', 'even')[$row % 2]##set $row+=1#">
<td style="width:80%;text-align:left">
<a class="whitelink" href="$sbRoot/home/displayShow?show=$cur_show[0]">$show_name</a>
<a class="whitelink" href="$sbRoot/home/displayShow?show=$cur_show['indexerid']">$show_name</a>
</td>
<td style="width:20%;text-align:center;color:white">#if $cur_show[1]#Scheduled#end if#</td>
<td style="width:20%;text-align:center;color:white">#if $cur_show['scheduled_update']#Scheduled#end if#</td>
</tr>
#end for
</tbody>

@@ -1,5 +1,5 @@

$(document).ready(function(){

$.sgd = !1;
$.fn.showHideProviders = function() {
$('.providerDiv').each(function(){
var providerName = $(this).attr('id');

@@ -40,7 +40,7 @@ $(document).ready(function(){

$.getJSON(sbRoot + '/config/providers/getNewznabCategories', params,
function(data){
updateNewznabCaps( data, selectedProvider );
console.debug(data.tv_categories);
//console.debug(data.tv_categories);
});
}

@@ -217,7 +217,7 @@ $(document).ready(function(){

if (rootObject.name == searchFor) {
found = true;
}
console.log(rootObject.name + ' while searching for: ' + searchFor);
//console.log(rootObject.name + ' while searching for: ' + searchFor);
});
return found;
};

@@ -232,25 +232,53 @@ $(document).ready(function(){

updateNewznabCaps = function( newzNabCaps, selectedProvider ) {

if (newzNabCaps && !ifExists($.fn.newznabProvidersCapabilities, selectedProvider[0])) {
$.fn.newznabProvidersCapabilities.push({'name' : selectedProvider[0], 'categories' : newzNabCaps.tv_categories});
}

$.fn.newznabProvidersCapabilities.push({
'name' : selectedProvider[0],
'categories' : newzNabCaps.tv_categories
.sort(function(a, b){return a.name > b.name})})
}
$.sgd && console.log(selectedProvider);
//Loop through the array and if currently selected newznab provider name matches one in the array, use it to
//update the capabilities select box (on the left).
if (selectedProvider[0]) {
$.fn.newznabProvidersCapabilities.forEach(function(newzNabCap) {
var newCapOptions = [], catName = '', hasCats = false;
if ($.fn.newznabProvidersCapabilities.length) {
$.fn.newznabProvidersCapabilities.forEach(function (newzNabCap) {
$.sgd && console.log('array found:' + (newzNabCap.categories instanceof Array ? 'yes': 'no'));

if (newzNabCap.name && newzNabCap.name == selectedProvider[0] && newzNabCap.categories instanceof Array) {
var newCapOptions = [];
newzNabCap.categories.forEach(function(category_set) {
newzNabCap.categories.forEach(function (category_set) {
if (category_set.id && category_set.name) {
newCapOptions.push({value : category_set.id, text : category_set.name + '(' + category_set.id + ')'});
};
catName = category_set.name.replace(/Docu([^\w]|$)(.*?)/i, 'Documentary$1');
newCapOptions.push({
value: category_set.id,
text: catName + ' (' + category_set.id + ')'
});
$('#newznab_cap').replaceOptions(newCapOptions);
}
});
};
$('#newznab_cap').replaceOptions(newCapOptions);
hasCats = !!newCapOptions.length
}
});
$('#nn-loadcats').removeClass('show').addClass('hide');
if (hasCats) {
$.sgd && console.log('hasCats');
$('#nn-nocats').removeClass('show').addClass('hide');
$('#nn-cats').removeClass('hide').addClass('show');
} else {
$.sgd && console.log('noCats');
$('#nn-cats').removeClass('show').addClass('hide');
$('#nn-nocats').removeClass('hide').addClass('show');
}
} else {
$.sgd && console.log('errCats');
// error - no caps
$('#nn-cats').removeClass('show').addClass('hide');
$('#nn-nocats').removeClass('show').addClass('hide');
$('#nn-loadcats').removeClass('hide').addClass('show');
}
}
}

$.fn.makeNewznabProviderString = function() {
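
Note: updateNewznabCaps() assumes the /config/providers/getNewznabCategories response carries a tv_categories array of objects with id and name fields (see data.tv_categories and category_set.id/name above). A Python sketch of that assumed shape; the ids shown are the conventional newznab TV category numbers, not values taken from this diff:

# Assumed JSON shape, inferred from the client code above.
example_response = {
    'tv_categories': [
        {'id': '5030', 'name': 'SD'},
        {'id': '5040', 'name': 'HD'},
        {'id': '5070', 'name': 'Anime'},
    ],
}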

@@ -384,7 +412,7 @@ $(document).ready(function(){

});

$(this).on('click', '#newznab_cat_update', function(){
console.debug('Clicked Button');
//console.debug('Clicked Button');

//Maybe check if there is anything selected?
$('#newznab_cat option').each(function() {

@@ -400,7 +428,7 @@ $(document).ready(function(){

if($(this).attr('selected') == 'selected')
{
var selected_cat = $(this).val();
console.debug(selected_cat);
//console.debug(selected_cat);
newOptions.push({text: selected_cat, value: selected_cat})
};
});

@@ -1,3 +1,5 @@

/** @namespace config.showLang */
/** @namespace config.showIsAnime */
/*globals $, config, sbRoot, generate_bwlist*/

$(document).ready(function () {

@@ -87,17 +89,18 @@ $(document).ready(function () {

$(this).toggle_SceneException();

var elABD = $('#air_by_date'), elScene = $('#scene'), elSports = $('#sports'), elAnime = $('#anime');
var elABD = $('#air_by_date'), elScene = $('#scene'), elSports = $('#sports'), elAnime = $('#anime'),
elIdMap = $('#idmapping');

function uncheck(el){el.prop('checked', !1)}
function checked(el){return el.prop('checked')}
function uncheck(el) {el.prop('checked', !1)}
function checked(el) {return el.prop('checked')}

function isAnime(){
function isAnime() {
uncheck(elABD); uncheck(elSports);
if (config.showIsAnime){ $('#blackwhitelist').fadeIn('fast', 'linear'); } return !0; }
function isScene(){ uncheck(elABD); uncheck(elSports); }
function isABD(){ uncheck(elAnime); uncheck(elScene); $('#blackwhitelist').fadeOut('fast', 'linear'); }
function isSports(){ uncheck(elAnime); uncheck(elScene); $('#blackwhitelist').fadeOut('fast', 'linear'); }
if (config.showIsAnime) { $('#blackwhitelist').fadeIn('fast', 'linear'); } return !0; }
function isScene() { uncheck(elABD); uncheck(elSports); }
function isABD() { uncheck(elAnime); uncheck(elScene); $('#blackwhitelist').fadeOut('fast', 'linear'); }
function isSports() { uncheck(elAnime); uncheck(elScene); $('#blackwhitelist').fadeOut('fast', 'linear'); }

if (checked(elAnime)) { isAnime(); }
if (checked(elScene)) { isScene(); }

@@ -110,8 +113,188 @@ $(document).ready(function () {

else
$('#blackwhitelist, #anime-options').fadeOut('fast', 'linear');
});
elIdMap.on('click', function() {
var elMapOptions = $('#idmapping-options'), anim = {fast: 'linear'};
if (checked(elIdMap))
elMapOptions.fadeIn(anim);
else
elMapOptions.fadeOut(anim);
});
elScene.on('click', function() { isScene(); });
elABD.on('click', function() { isABD(); });
elSports.on('click', function() { isSports() });

function undef(value) {
return /undefined/i.test(typeof(value));
}

function updateSrcLinks() {

var preventSave = !1, search = 'data-search';
$('[id^=mid-]').each(function (i, selected) {
var elSelected = $(selected),
okDigits = !(/[^\d]/.test(elSelected.val()) || ('' == elSelected.val())),
service = '#src-' + elSelected.attr('id'),
elLock = $('#lockid-' + service.replace(/.*?(\d+)$/, '$1')),
elService = $(service),
On = 'data-', Off = '', linkOnly = !1, newLink = '';

if (okDigits) {
if (0 < parseInt(elSelected.val(), 10)) {
On = ''; Off = 'data-';
} else {
linkOnly = !0
}
}
$.each(['href', 'title', 'onclick'], function(i, attr) {
if ('n' == elService.attr(search)) {
elService.attr(On + attr, elService.attr(Off + attr)).removeAttr(Off + attr);
}
if (linkOnly)
elService.attr(attr, elService.attr(search + '-' + attr));
elService.attr(search, linkOnly ? 'y' : 'n')
});
if (('' == Off) && !linkOnly) {
preventSave = !0;
elSelected.addClass('warning').attr({title: 'Use digits (0-9)'});
elLock.prop('disabled', !0);
} else {
elSelected.removeClass('warning').removeAttr('title');
elLock.prop('disabled', !1);
if (!undef(elService.attr('href'))) {
if (!undef(elService.attr('data-href')) && linkOnly) {
newLink = elService.attr(search + '-href');
} else {
newLink = elService.attr((undef(elService.attr('data-href')) ? '' : 'data-')
+ 'href').replace(/(.*?)\d+/, '$1') + elSelected.val();
}
elService.attr('href', newLink);
}
}
});
$('#save-mapping').prop('disabled', preventSave);
}

$('[id^=mid-]').on('input', function() {
updateSrcLinks();
});

function saveMapping(paused, markWanted) {
var sbutton = $(this), mid = $('[id^=mid-]'), lock = $('[id^=lockid-]'),
allf = $('[id^=mid-], [id^=lockid-], #reset-mapping, [name^=set-master]'),
radio = $('[name^=set-master]:checked'), isMaster = !radio.length || 'the-master' == radio.attr('id'),
panelSaveGet = $('#panel-save-get'), saveWait = $('#save-wait');

allf.prop('disabled', !0);
sbutton.prop('disabled', !0);
var param = {'show': $('#show').val()};
mid.each(function (i, selected) {
param[$(selected).attr('id')] = $(selected).val();
});
lock.each(function (i, selected) {
param[$(selected).attr('id')] = $(selected).prop('checked');
});
if (!isMaster) {
param['indexer'] = $('#indexer').val();
param['mindexer'] = radio.attr('data-indexer');
param['mindexerid'] = radio.attr('data-indexerid');
param['paused'] = paused ? '1' : '0';
param['markwanted'] = markWanted ? '1' : '0';
panelSaveGet.removeClass('show').addClass('hide');
saveWait.removeClass('hide').addClass('show');
}

$.getJSON(sbRoot + '/home/saveMapping', param)
.done(function (data) {
allf.prop('disabled', !1);
sbutton.prop('disabled', !1);
panelSaveGet.removeClass('hide').addClass('show');
saveWait.removeClass('show').addClass('hide');
if (undef(data.error)) {
$.each(data.map, function (i, item) {
$('#mid-' + i).val(item.id);
$('#lockid-' + i).prop('checked', -100 == item.status)
});
/** @namespace data.switch */
/** @namespace data.switch.mid */
if (!isMaster && data.hasOwnProperty('switch') && data.switch.hasOwnProperty('Success')) {
window.location.replace(sbRoot + '/home/displayShow?show=' + data.mid);
} else if ((0 < $('*[data-maybe-master=1]').length)
&& (((0 == $('[name^=set-master]').length) && (0 < $('*[data-maybe-master=1]').val()))
|| ((0 < $('[name^=set-master]').length) && (0 == $('*[data-maybe-master=1]').val())))) {
location.reload();
}
}})
.fail(function (data) {
allf.prop('disabled', !1);
sbutton.prop('disabled', !1);
});
}

function resetMapping() {
var fbutton = $(this), mid = $('[id^=mid-]'), lock = $('[id^=lockid-]'),
allf = $('[id^=mid-], [id^=lockid-], #save-mapping, [name^=set-master]');

allf.prop('disabled', !0);
fbutton.prop('disabled', !0);

var param = {'show': $('#show').val()};
mid.each(function (i, selected) {
param[$(selected).attr('id')] = $(selected).val();
});

lock.each(function (i, selected) {
param[$(selected).attr('id')] = $(selected).prop('checked');
});

$.getJSON(sbRoot + '/home/forceMapping', param)
.done(function (data) {
allf.prop('disabled', !1);
fbutton.prop('disabled', !1);
if (undef(data.error)) {
$('#the-master').prop('checked', !0).trigger('click');
$.each(data, function (i, item) {
$('#mid-' + i).val(item.id);
$('#lockid-' + i).prop('checked', -100 == item.status);
});
updateSrcLinks();
}})
.fail(function (data) {
allf.prop('disabled', !1);
fbutton.prop('disabled', !1);
});
}

$('#save-mapping, #reset-mapping').click(function() {

var save = /save/i.test($(this).attr('id')),
radio = $('[name=set-master]:checked'), isMaster = !radio.length || 'the-master' == radio.attr('id'),
newMaster = (save && !isMaster),
paused = 'on' == $('#paused:checked').val(),
extraWarn = !newMaster ? '' : 'Warning: Changing the master source can produce undesirable'
+ ' results if episodes do not match at old and new TV info sources<br /><br />'
+ (paused ? '' : '<input type="checkbox" id="mark-wanted" style="margin-right:6px">'
+ '<span class="red-text">Mark all added episodes Wanted to search for releases</span>'
+ '</input><br /><br />'),
checkAction = !newMaster ? 'save ID changes' : 'change the TV info source';

$.confirm({
'title': save ? 'Confirm changes' : 'Get default IDs',
'message': extraWarn + 'Are you sure you want to ' + (save ? checkAction : 'fetch default IDs') + ' ?',
'buttons': {
'Yes': {
'class': 'green',
'action': function () {
save ? saveMapping(paused, 'on' == $('#mark-wanted:checked').val()) : resetMapping()
}
},
'No': {
'class': 'red',
'action': function () {}
}
}
});

});

});

@@ -4,6 +4,7 @@ import time

import os
import sys
import errno
import shutil

from . import (LockBase, LockFailed, NotLocked, NotMyLock, LockTimeout,
AlreadyLocked)

@@ -24,7 +25,7 @@ class MkdirLockFile(LockBase):

self.pid))

def acquire(self, timeout=None):
timeout = timeout is not None and timeout or self.timeout
timeout = timeout if timeout is not None else self.timeout
end_time = time.time()
if timeout is not None and timeout > 0:
end_time += timeout
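
Note: the rewritten line fixes a real bug, not just style: with the and/or idiom a falsy-but-valid timeout of 0 (poll once, never block) silently falls through to the default.

# Demonstration of the bug the one-line change fixes:
timeout, default = 0, 30
old = timeout is not None and timeout or default   # -> 30: caller's 0 is lost
new = timeout if timeout is not None else default  # -> 0: correct
assert (old, new) == (30, 0)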

@@ -67,7 +68,16 @@ class MkdirLockFile(LockBase):

elif not os.path.exists(self.unique_name):
raise NotMyLock("%s is locked, but not by me" % self.path)
os.unlink(self.unique_name)
os.rmdir(self.lock_file)
self.delete_directory()

def delete_directory(self):
# NOTE(dims): We may end up with a race condition here. The path
# can be deleted between the .exists() and the .rmtree() call.
# So we should catch any exception if the path does not exist.
try:
shutil.rmtree(self.lock_file)
except Exception:
pass

def is_locked(self):
return os.path.exists(self.lock_file)

@@ -78,6 +88,4 @@ class MkdirLockFile(LockBase):

def break_lock(self):
if os.path.exists(self.lock_file):
for name in os.listdir(self.lock_file):
os.unlink(os.path.join(self.lock_file, name))
os.rmdir(self.lock_file)
self.delete_directory()
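
For context, typical use of the patched class (an illustrative sketch; the path is an example only, and acquire/release is the lockfile package's standard LockBase interface):

from lockfile.mkdirlockfile import MkdirLockFile

lock = MkdirLockFile('/tmp/example-resource')  # example path
lock.acquire(timeout=0)  # 0 now means "try once", thanks to the fix above
try:
    pass  # critical section: the lock directory exists while held
finally:
    lock.release()  # now removes the directory via delete_directory()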

@@ -37,7 +37,7 @@ sys.path.insert(1, os.path.abspath('../lib'))

from sickbeard import helpers, encodingKludge as ek
from sickbeard import db, logger, naming, metadata, providers, scene_exceptions, scene_numbering, \
scheduler, auto_post_processer, search_queue, search_propers, search_recent, search_backlog, \
show_queue, show_updater, subtitles, traktChecker, version_checker
show_queue, show_updater, subtitles, traktChecker, version_checker, indexermapper
from sickbeard.config import CheckSection, check_setting_int, check_setting_str, ConfigMigrator, minimax
from sickbeard.common import SD, SKIPPED
from sickbeard.databases import mainDB, cache_db, failed_db

@@ -51,6 +51,7 @@ from lib.adba.aniDBerrors import (AniDBError, AniDBBannedError)

from lib.configobj import ConfigObj
from lib.libtrakt import TraktAPI
import trakt_helpers
import threading

PID = None

@@ -83,6 +84,7 @@ properFinderScheduler = None

autoPostProcesserScheduler = None
subtitlesFinderScheduler = None
traktCheckerScheduler = None
background_mapping_task = None

showList = None
UPDATE_SHOWS_ON_START = False

@@ -217,12 +219,13 @@ DEFAULT_UPDATE_FREQUENCY = 1

MIN_AUTOPOSTPROCESSER_FREQUENCY = 1
MIN_RECENTSEARCH_FREQUENCY = 10
MIN_BACKLOG_FREQUENCY = 2
MAX_BACKLOG_FREQUENCY = 35
MIN_BACKLOG_FREQUENCY = 7
MAX_BACKLOG_FREQUENCY = 42
MIN_UPDATE_FREQUENCY = 1

BACKLOG_DAYS = 7
SEARCH_UNAIRED = False
UNAIRED_RECENT_SEARCH_ONLY = True

ADD_SHOWS_WO_DIR = False
REMOVE_FILENAME_CHARS = None

@@ -541,9 +544,9 @@ def initialize(consoleLogging=True):

USE_FAILED_DOWNLOADS, DELETE_FAILED, ANON_REDIRECT, TMDB_API_KEY, DEBUG, PROXY_SETTING, PROXY_INDEXERS, \
AUTOPOSTPROCESSER_FREQUENCY, DEFAULT_AUTOPOSTPROCESSER_FREQUENCY, MIN_AUTOPOSTPROCESSER_FREQUENCY, \
ANIME_DEFAULT, NAMING_ANIME, USE_ANIDB, ANIDB_USERNAME, ANIDB_PASSWORD, ANIDB_USE_MYLIST, \
SCENE_DEFAULT, BACKLOG_DAYS, SEARCH_UNAIRED, ANIME_TREAT_AS_HDTV, \
SCENE_DEFAULT, BACKLOG_DAYS, SEARCH_UNAIRED, UNAIRED_RECENT_SEARCH_ONLY, ANIME_TREAT_AS_HDTV, \
COOKIE_SECRET, USE_IMDB_INFO, IMDB_ACCOUNTS, DISPLAY_BACKGROUND, DISPLAY_BACKGROUND_TRANSPARENT, DISPLAY_ALL_SEASONS, \
SHOW_TAGS, DEFAULT_SHOW_TAG, SHOWLIST_TAGVIEW
SHOW_TAGS, DEFAULT_SHOW_TAG, SHOWLIST_TAGVIEW, background_mapping_task

if __INITIALIZED__:
return False

@@ -615,7 +618,8 @@ def initialize(consoleLogging=True):

TIME_PRESET = TIME_PRESET_W_SECONDS.replace(u':%S', u'')
TIMEZONE_DISPLAY = check_setting_str(CFG, 'GUI', 'timezone_display', 'network')
DISPLAY_BACKGROUND = bool(check_setting_int(CFG, 'General', 'display_background', 0))
DISPLAY_BACKGROUND_TRANSPARENT = check_setting_str(CFG, 'General', 'display_background_transparent', 'transparent')
DISPLAY_BACKGROUND_TRANSPARENT = check_setting_str(CFG, 'General', 'display_background_transparent',
'transparent')
DISPLAY_ALL_SEASONS = bool(check_setting_int(CFG, 'General', 'display_all_seasons', 1))
SHOW_TAGS = check_setting_str(CFG, 'GUI', 'show_tags', 'Show List').split(',')
DEFAULT_SHOW_TAG = check_setting_str(CFG, 'GUI', 'default_show_tag', 'Show List')

@@ -705,7 +709,8 @@ def initialize(consoleLogging=True):

NAMING_ABD_PATTERN = check_setting_str(CFG, 'General', 'naming_abd_pattern', '%SN - %A.D - %EN')
NAMING_CUSTOM_ABD = bool(check_setting_int(CFG, 'General', 'naming_custom_abd', 0))
NAMING_SPORTS_PATTERN = check_setting_str(CFG, 'General', 'naming_sports_pattern', '%SN - %A-D - %EN')
NAMING_ANIME_PATTERN = check_setting_str(CFG, 'General', 'naming_anime_pattern', 'Season %0S/%SN - S%0SE%0E - %EN')
NAMING_ANIME_PATTERN = check_setting_str(CFG, 'General', 'naming_anime_pattern',
'Season %0S/%SN - S%0SE%0E - %EN')
NAMING_ANIME = check_setting_int(CFG, 'General', 'naming_anime', 3)
NAMING_CUSTOM_SPORTS = bool(check_setting_int(CFG, 'General', 'naming_custom_sports', 0))
NAMING_CUSTOM_ANIME = bool(check_setting_int(CFG, 'General', 'naming_custom_anime', 0))

@@ -750,7 +755,8 @@ def initialize(consoleLogging=True):

RECENTSEARCH_FREQUENCY = MIN_RECENTSEARCH_FREQUENCY

BACKLOG_FREQUENCY = check_setting_int(CFG, 'General', 'backlog_frequency', DEFAULT_BACKLOG_FREQUENCY)
BACKLOG_FREQUENCY = minimax(BACKLOG_FREQUENCY, DEFAULT_BACKLOG_FREQUENCY, MIN_BACKLOG_FREQUENCY, MAX_BACKLOG_FREQUENCY)
BACKLOG_FREQUENCY = minimax(BACKLOG_FREQUENCY, DEFAULT_BACKLOG_FREQUENCY,
MIN_BACKLOG_FREQUENCY, MAX_BACKLOG_FREQUENCY)

UPDATE_FREQUENCY = check_setting_int(CFG, 'General', 'update_frequency', DEFAULT_UPDATE_FREQUENCY)
if UPDATE_FREQUENCY < MIN_UPDATE_FREQUENCY:
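
Note on the migration: an existing config may hold a backlog_frequency below the new minimum of 7, so the loaded value is clamped. A sketch of the clamp's effect (the real minimax is imported from sickbeard.config above; this stand-in only illustrates the behavior implied by its call site):

def minimax(val, default, low, high):
    # clamp val into [low, high]; fall back to default if not an int
    try:
        val = int(val)
    except (TypeError, ValueError):
        return default
    return min(max(val, low), high)

assert minimax(2, 21, 7, 42) == 7    # old minimum of 2 migrates up to 7
assert minimax(50, 21, 7, 42) == 42  # oversized values cap at the new max
# (the default of 21 here is illustrative, not DEFAULT_BACKLOG_FREQUENCY)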

@@ -758,6 +764,7 @@ def initialize(consoleLogging=True):

BACKLOG_DAYS = check_setting_int(CFG, 'General', 'backlog_days', 7)
SEARCH_UNAIRED = bool(check_setting_int(CFG, 'General', 'search_unaired', 0))
UNAIRED_RECENT_SEARCH_ONLY = bool(check_setting_int(CFG, 'General', 'unaired_recent_search_only', 1))

NZB_DIR = check_setting_str(CFG, 'Blackhole', 'nzb_dir', '')
TORRENT_DIR = check_setting_str(CFG, 'Blackhole', 'torrent_dir', '')

@@ -914,7 +921,8 @@ def initialize(consoleLogging=True):

TRAKT_START_PAUSED = bool(check_setting_int(CFG, 'Trakt', 'trakt_start_paused', 0))
TRAKT_SYNC = bool(check_setting_int(CFG, 'Trakt', 'trakt_sync', 0))
TRAKT_DEFAULT_INDEXER = check_setting_int(CFG, 'Trakt', 'trakt_default_indexer', 1)
TRAKT_UPDATE_COLLECTION = trakt_helpers.read_config_string(check_setting_str(CFG, 'Trakt', 'trakt_update_collection', ''))
TRAKT_UPDATE_COLLECTION = trakt_helpers.read_config_string(
check_setting_str(CFG, 'Trakt', 'trakt_update_collection', ''))
TRAKT_ACCOUNTS = TraktAPI.read_config_string(check_setting_str(CFG, 'Trakt', 'trakt_accounts', ''))
TRAKT_MRU = check_setting_str(CFG, 'Trakt', 'trakt_mru', '')

@@ -1141,39 +1149,59 @@ def initialize(consoleLogging=True):

# initialize schedulers
# updaters
update_now = datetime.timedelta(minutes=0)
versionCheckScheduler = scheduler.Scheduler(version_checker.CheckVersion(),
versionCheckScheduler = scheduler.Scheduler(
version_checker.CheckVersion(),
cycleTime=datetime.timedelta(hours=UPDATE_FREQUENCY),
threadName='CHECKVERSION',
silent=False)

showQueueScheduler = scheduler.Scheduler(show_queue.ShowQueue(),
showQueueScheduler = scheduler.Scheduler(
show_queue.ShowQueue(),
cycleTime=datetime.timedelta(seconds=3),
threadName='SHOWQUEUE')

showUpdateScheduler = scheduler.Scheduler(show_updater.ShowUpdater(),
showUpdateScheduler = scheduler.Scheduler(
show_updater.ShowUpdater(),
cycleTime=datetime.timedelta(hours=1),
threadName='SHOWUPDATER',
start_time=datetime.time(hour=SHOW_UPDATE_HOUR),
prevent_cycle_run=showQueueScheduler.action.isShowUpdateRunning) # 3 AM
prevent_cycle_run=showQueueScheduler.action.isShowUpdateRunning) # 3AM

# searchers
searchQueueScheduler = scheduler.Scheduler(search_queue.SearchQueue(),
searchQueueScheduler = scheduler.Scheduler(
search_queue.SearchQueue(),
cycleTime=datetime.timedelta(seconds=3),
threadName='SEARCHQUEUE')

update_interval = datetime.timedelta(minutes=(RECENTSEARCH_FREQUENCY, 1)[4489 == RECENTSEARCH_FREQUENCY])
recentSearchScheduler = scheduler.Scheduler(search_recent.RecentSearcher(),
recentSearchScheduler = scheduler.Scheduler(
search_recent.RecentSearcher(),
cycleTime=update_interval,
threadName='RECENTSEARCHER',
run_delay=update_now if RECENTSEARCH_STARTUP
else datetime.timedelta(minutes=5),
prevent_cycle_run=searchQueueScheduler.action.is_recentsearch_in_progress)

backlogSearchScheduler = search_backlog.BacklogSearchScheduler(search_backlog.BacklogSearcher(),
if [x for x in providers.sortedProviderList() if x.is_active() and
x.enable_backlog and x.providerType == GenericProvider.NZB]:
nextbacklogpossible = datetime.datetime.fromtimestamp(
search_backlog.BacklogSearcher().last_runtime) + datetime.timedelta(hours=23)
now = datetime.datetime.now()
if nextbacklogpossible > now:
time_diff = nextbacklogpossible - now
if (time_diff > datetime.timedelta(hours=12) and
nextbacklogpossible - datetime.timedelta(hours=12) > now):
time_diff = time_diff - datetime.timedelta(hours=12)
else:
time_diff = datetime.timedelta(minutes=0)
backlogdelay = helpers.tryInt((time_diff.total_seconds() / 60) + 10, 10)
else:
backlogdelay = 10
backlogSearchScheduler = search_backlog.BacklogSearchScheduler(
search_backlog.BacklogSearcher(),
cycleTime=datetime.timedelta(minutes=get_backlog_cycle_time()),
threadName='BACKLOG',
run_delay=update_now if BACKLOG_STARTUP
else datetime.timedelta(minutes=10),
run_delay=datetime.timedelta(minutes=backlogdelay),
prevent_cycle_run=searchQueueScheduler.action.is_standard_backlog_in_progress)

propers_searcher = search_propers.ProperSearcher()

@@ -1185,7 +1213,8 @@ def initialize(consoleLogging=True):

update_interval = datetime.timedelta(hours=1)
run_at = datetime.time(hour=1) # 1 AM

properFinderScheduler = scheduler.Scheduler(propers_searcher,
properFinderScheduler = scheduler.Scheduler(
propers_searcher,
cycleTime=update_interval,
threadName='FINDPROPERS',
start_time=run_at,

@@ -1193,18 +1222,21 @@ def initialize(consoleLogging=True):

prevent_cycle_run=searchQueueScheduler.action.is_propersearch_in_progress)

# processors
autoPostProcesserScheduler = scheduler.Scheduler(auto_post_processer.PostProcesser(),
autoPostProcesserScheduler = scheduler.Scheduler(
auto_post_processer.PostProcesser(),
cycleTime=datetime.timedelta(
minutes=AUTOPOSTPROCESSER_FREQUENCY),
threadName='POSTPROCESSER',
silent=not PROCESS_AUTOMATICALLY)

traktCheckerScheduler = scheduler.Scheduler(traktChecker.TraktChecker(),
traktCheckerScheduler = scheduler.Scheduler(
traktChecker.TraktChecker(),
cycleTime=datetime.timedelta(hours=1),
threadName='TRAKTCHECKER',
silent=not USE_TRAKT)

subtitlesFinderScheduler = scheduler.Scheduler(subtitles.SubtitlesFinder(),
subtitlesFinderScheduler = scheduler.Scheduler(
subtitles.SubtitlesFinder(),
cycleTime=datetime.timedelta(hours=SUBTITLES_FINDER_FREQUENCY),
threadName='FINDSUBTITLES',
silent=not USE_SUBTITLES)

@@ -1212,6 +1244,8 @@ def initialize(consoleLogging=True):

showList = []
loadingShowList = {}

background_mapping_task = threading.Thread(name='LOAD-MAPPINGS', target=indexermapper.load_mapped_ids)

__INITIALIZED__ = True
return True

@@ -1221,10 +1255,15 @@ def start():

showUpdateScheduler, versionCheckScheduler, showQueueScheduler, \
properFinderScheduler, autoPostProcesserScheduler, searchQueueScheduler, \
subtitlesFinderScheduler, USE_SUBTITLES, traktCheckerScheduler, \
recentSearchScheduler, events, started
recentSearchScheduler, events, started, background_mapping_task

with INIT_LOCK:
if __INITIALIZED__:
# Load all Indexer mappings in background
indexermapper.defunct_indexer = [i for i in indexerApi().all_indexers if indexerApi(i).config.get('defunct')]
indexermapper.indexer_list = [i for i in indexerApi().all_indexers]
background_mapping_task.start()

# start system events queue
events.start()

@@ -1259,8 +1298,8 @@ def start():

subtitlesFinderScheduler.start()

# start the trakt checker
#if USE_TRAKT:
#traktCheckerScheduler.start()
# if USE_TRAKT:
# traktCheckerScheduler.start()

started = True

@@ -1414,7 +1453,8 @@ def save_config():

new_config = ConfigObj()
new_config.filename = CONFIG_FILE

# For passwords you must include the word `password` in the item_name and add `helpers.encrypt(ITEM_NAME, ENCRYPTION_VERSION)` in save_config()
# For passwords you must include the word `password` in the item_name and
# add `helpers.encrypt(ITEM_NAME, ENCRYPTION_VERSION)` in save_config()
new_config['General'] = {}
new_config['General']['branch'] = BRANCH
new_config['General']['git_remote'] = GIT_REMOTE

@@ -1506,6 +1546,7 @@ def save_config():

new_config['General']['backlog_days'] = int(BACKLOG_DAYS)
new_config['General']['search_unaired'] = int(SEARCH_UNAIRED)
new_config['General']['unaired_recent_search_only'] = int(UNAIRED_RECENT_SEARCH_ONLY)

new_config['General']['cache_dir'] = ACTUAL_CACHE_DIR if ACTUAL_CACHE_DIR else 'cache'
new_config['General']['root_dirs'] = ROOT_DIRS if ROOT_DIRS else ''
@@ -177,7 +177,7 @@ class ShowListUI:


class Proper:
    def __init__(self, name, url, date, show):
    def __init__(self, name, url, date, show, parsed_show=None):
        self.name = name
        self.url = url
        self.date = date

@@ -186,6 +186,7 @@ class Proper:
        self.release_group = None
        self.version = -1

        self.parsed_show = parsed_show
        self.show = show
        self.indexer = None
        self.indexerid = -1
@@ -16,16 +16,15 @@
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.

import os.path
import operator
import os.path
import platform
import re
import uuid
import traceback
import uuid

import sickbeard
import logger

import sickbeard

INSTANCE_ID = str(uuid.uuid1())

@@ -409,7 +408,8 @@ class Overview:
    # For both snatched statuses. Note: SNATCHED/QUAL have same value and break dict.
    SNATCHED = SNATCHED_PROPER = SNATCHED_BEST  # 9

    overviewStrings = {SKIPPED: 'skipped',
    overviewStrings = {UNKNOWN: 'unknown',
                       SKIPPED: 'skipped',
                       WANTED: 'wanted',
                       QUAL: 'qual',
                       GOOD: 'good',
@@ -19,7 +19,8 @@
from sickbeard import db

MIN_DB_VERSION = 1
MAX_DB_VERSION = 2
MAX_DB_VERSION = 3


# Add new migrations at the bottom of the list; subclass the previous migration.
class InitialSchema(db.SchemaUpgrade):

@@ -32,17 +33,21 @@ class InitialSchema(db.SchemaUpgrade):
                'CREATE TABLE lastSearch (provider TEXT, time NUMERIC)',
                'CREATE TABLE db_version (db_version INTEGER)',
                'INSERT INTO db_version (db_version) VALUES (1)',
                'CREATE TABLE scene_exceptions (exception_id INTEGER PRIMARY KEY, indexer_id INTEGER KEY,'
                ' show_name TEXT, season NUMERIC, custom NUMERIC)',
                'CREATE TABLE scene_names (indexer_id INTEGER, name TEXT)',
                'CREATE TABLE network_timezones (network_name TEXT PRIMARY KEY, timezone TEXT)',
                'CREATE TABLE scene_exceptions_refresh (list TEXT PRIMARY KEY, last_refreshed INTEGER)',
                'CREATE TABLE network_conversions ('
                'tvdb_network TEXT PRIMARY KEY, tvrage_network TEXT, tvrage_country TEXT)',
                'CREATE INDEX tvrage_idx on network_conversions (tvrage_network, tvrage_country)',
                'CREATE TABLE provider_cache (provider TEXT ,name TEXT, season NUMERIC, episodes TEXT,'
                ' indexerid NUMERIC, url TEXT UNIQUE, time NUMERIC, quality TEXT, release_group TEXT, '
                'version NUMERIC)',
                'CREATE TABLE IF NOT EXISTS "backlogparts" ("part" NUMERIC NOT NULL ,'
                ' "indexer" NUMERIC NOT NULL , "indexerid" NUMERIC NOT NULL )',
                'CREATE TABLE IF NOT EXISTS "lastrecentsearch" ("name" TEXT PRIMARY KEY NOT NULL'
                ' , "datetime" NUMERIC NOT NULL )',
            ]
            for query in queries:
                self.connection.action(query)
            self.setDBVersion(3)


class ConsolidateProviders(InitialSchema):

@@ -59,11 +64,38 @@ class ConsolidateProviders(InitialSchema):
                               ' indexerid NUMERIC, url TEXT UNIQUE, time NUMERIC, quality TEXT, release_group TEXT, '
                               'version NUMERIC)')

        keep_tables = set(['lastUpdate', 'lastSearch', 'db_version', 'scene_exceptions', 'scene_names',
                           'network_timezones', 'scene_exceptions_refresh', 'network_conversions', 'provider_cache'])
        keep_tables = set(['lastUpdate', 'lastSearch', 'db_version',
                           'network_timezones', 'network_conversions', 'provider_cache'])
        current_tables = set(self.listTables())
        remove_tables = list(current_tables - keep_tables)
        for table in remove_tables:
            self.connection.action('DROP TABLE [%s]' % table)

        self.incDBVersion()


class AddBacklogParts(ConsolidateProviders):
    def test(self):
        return self.checkDBVersion() > 2

    def execute(self):

        db.backup_database('cache.db', self.checkDBVersion())
        if self.hasTable('scene_names'):
            self.connection.action('DROP TABLE scene_names')

        if not self.hasTable('backlogparts'):
            self.connection.action('CREATE TABLE IF NOT EXISTS "backlogparts" ("part" NUMERIC NOT NULL ,'
                                   ' "indexer" NUMERIC NOT NULL , "indexerid" NUMERIC NOT NULL )')

        if not self.hasTable('lastrecentsearch'):
            self.connection.action('CREATE TABLE IF NOT EXISTS "lastrecentsearch" ("name" TEXT PRIMARY KEY NOT NULL'
                                   ' , "datetime" NUMERIC NOT NULL )')

        if self.hasTable('scene_exceptions_refresh'):
            self.connection.action('DROP TABLE scene_exceptions_refresh')
        if self.hasTable('scene_exceptions'):
            self.connection.action('DROP TABLE scene_exceptions')
        self.connection.action('VACUUM')

        self.incDBVersion()
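Aside: each migration here subclasses the previous one so a single test() gate decides whether execute() still needs to run. A minimal, self-contained sketch of that version-gating pattern (class and method names below are illustrative stand-ins, not the repo's db module):

    # version-gated migration sketch; SchemaUpgradeSketch stands in for db.SchemaUpgrade
    class SchemaUpgradeSketch(object):
        def __init__(self, version):
            self.version = version  # stands in for the db_version table

        def check_db_version(self):
            return self.version

    class AddBacklogPartsSketch(SchemaUpgradeSketch):
        def test(self):
            # True means this upgrade was already applied
            return self.check_db_version() > 2

        def execute(self):
            # ... create backlogparts / lastrecentsearch, drop the moved tables ...
            self.version += 1  # the incDBVersion() equivalent

    upgrade = AddBacklogPartsSketch(2)
    if not upgrade.test():
        upgrade.execute()
    assert upgrade.test()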
@@ -27,7 +27,7 @@ from sickbeard import encodingKludge as ek
from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException

MIN_DB_VERSION = 9  # oldest db version we support migrating from
MAX_DB_VERSION = 20003
MAX_DB_VERSION = 20004


class MainSanityCheck(db.DBSanityCheck):

@@ -1096,3 +1096,57 @@ class AddTvShowTags(db.SchemaUpgrade):

        self.setDBVersion(20003)
        return self.checkDBVersion()


# 20003 -> 20004
class ChangeMapIndexer(db.SchemaUpgrade):
    def execute(self):
        db.backup_database('sickbeard.db', self.checkDBVersion())

        if self.hasTable('indexer_mapping'):
            self.connection.action('DROP TABLE indexer_mapping')

        logger.log(u'Changing table indexer_mapping')
        self.connection.action(
            'CREATE TABLE indexer_mapping (indexer_id INTEGER, indexer NUMERIC, mindexer_id INTEGER NOT NULL, mindexer NUMERIC, date NUMERIC NOT NULL DEFAULT 0, status INTEGER NOT NULL DEFAULT 0, PRIMARY KEY (indexer_id, indexer, mindexer))')

        self.connection.action('CREATE INDEX IF NOT EXISTS idx_mapping ON indexer_mapping (indexer_id, indexer)')

        if not self.hasColumn('info', 'last_run_backlog'):
            logger.log('Adding last_run_backlog to info')
            self.addColumn('info', 'last_run_backlog', 'NUMERIC', 1)

        logger.log(u'Moving table scene_exceptions from cache.db to sickbeard.db')
        if self.hasTable('scene_exceptions_refresh'):
            self.connection.action('DROP TABLE scene_exceptions_refresh')
        self.connection.action('CREATE TABLE scene_exceptions_refresh (list TEXT PRIMARY KEY, last_refreshed INTEGER)')
        if self.hasTable('scene_exceptions'):
            self.connection.action('DROP TABLE scene_exceptions')
        self.connection.action('CREATE TABLE scene_exceptions (exception_id INTEGER PRIMARY KEY, indexer_id INTEGER KEY, show_name TEXT, season NUMERIC, custom NUMERIC)')

        try:
            cachedb = db.DBConnection(filename='cache.db')
            if cachedb.hasTable('scene_exceptions'):
                sqlResults = cachedb.action('SELECT * FROM scene_exceptions')
                cs = []
                for r in sqlResults:
                    cs.append(['INSERT OR REPLACE INTO scene_exceptions (exception_id, indexer_id, show_name, season, custom)'
                               ' VALUES (?,?,?,?,?)', [r['exception_id'], r['indexer_id'], r['show_name'],
                                                       r['season'], r['custom']]])

                if len(cs) > 0:
                    self.connection.mass_action(cs)
        except:
            pass

        keep_tables = {'scene_exceptions', 'scene_exceptions_refresh', 'info', 'indexer_mapping', 'blacklist',
                       'db_version', 'history', 'imdb_info', 'lastUpdate', 'scene_numbering', 'tv_episodes', 'tv_shows',
                       'whitelist', 'xem_refresh'}
        current_tables = set(self.listTables())
        remove_tables = list(current_tables - keep_tables)
        for table in remove_tables:
            self.connection.action('DROP TABLE [%s]' % table)

        self.connection.action('VACUUM')

        self.setDBVersion(20004)
        return self.checkDBVersion()

@@ -448,6 +448,7 @@ def MigrationCode(myDB):
        20000: sickbeard.mainDB.DBIncreaseTo20001,
        20001: sickbeard.mainDB.AddTvShowOverview,
        20002: sickbeard.mainDB.AddTvShowTags,
        20003: sickbeard.mainDB.ChangeMapIndexer
        # 20002: sickbeard.mainDB.AddCoolSickGearFeature3,
    }
@@ -18,7 +18,11 @@

from __future__ import print_function
from __future__ import with_statement

import base64
import datetime
import getpass
import hashlib
import os
import re
import shutil

@@ -27,18 +31,14 @@ import stat
import tempfile
import time
import traceback
import hashlib
import urlparse
import uuid
import base64
import datetime

import sickbeard
import subliminal
import adba
import requests
import requests.exceptions

import sickbeard
import subliminal

try:
    import json

@@ -51,7 +51,7 @@ except ImportError:
    import elementtree.ElementTree as etree

from sickbeard.exceptions import MultipleShowObjectsException, ex
from sickbeard import logger, classes, db, notifiers, clients
from sickbeard import logger, db, notifiers, clients
from sickbeard.common import USER_AGENT, mediaExtensions, subtitleExtensions, cpu_presets
from sickbeard import encodingKludge as ek
@@ -178,6 +178,33 @@ def findCertainShow(showList, indexerid):
        raise MultipleShowObjectsException()


def find_show_by_id(show_list, id_dict, no_mapped_ids=True):
    """

    :param show_list:
    :type show_list: list
    :param id_dict: {indexer: id}
    :type id_dict: dict
    :param no_mapped_ids:
    :type no_mapped_ids: bool
    :return: showObj or MultipleShowObjectsException
    """
    results = []
    if show_list and id_dict and isinstance(id_dict, dict):
        id_dict = {k: v for k, v in id_dict.items() if v > 0}
        if no_mapped_ids:
            results = list(set([s for k, v in id_dict.iteritems() for s in show_list
                                if k == s.indexer and v == s.indexerid]))
        else:
            results = list(set([s for k, v in id_dict.iteritems() for s in show_list
                                if v == s.ids.get(k, {'id': 0})['id']]))

    if len(results) == 1:
        return results[0]
    elif len(results) > 1:
        raise MultipleShowObjectsException()


def makeDir(path):
    if not ek.ek(os.path.isdir, path):
        try:
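For reference, a hedged usage sketch of the new find_show_by_id() (Python 2, with the function above in scope; the _Show class is a stand-in exposing only the attributes the function reads):

    class _Show(object):
        def __init__(self, indexer, indexerid, ids):
            self.indexer, self.indexerid, self.ids = indexer, indexerid, ids

    shows = [_Show(1, 73739, {3: {'id': 123}})]  # native indexer 1, plus a mapped id for indexer 3

    # direct lookup against the show's native indexer id
    assert find_show_by_id(shows, {1: 73739}) is shows[0]
    # lookup via the related (mapped) ids instead
    assert find_show_by_id(shows, {3: 123}, no_mapped_ids=False) is shows[0]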
@@ -960,64 +987,6 @@ def set_up_anidb_connection():
    return sickbeard.ADBA_CONNECTION.authed()


def mapIndexersToShow(showObj):
    mapped = {}

    # init mapped indexers object
    for indexer in sickbeard.indexerApi().indexers:
        mapped[indexer] = showObj.indexerid if int(indexer) == int(showObj.indexer) else 0

    myDB = db.DBConnection()
    sqlResults = myDB.select(
        "SELECT * FROM indexer_mapping WHERE indexer_id = ? AND indexer = ?",
        [showObj.indexerid, showObj.indexer])

    # for each mapped entry
    for curResult in sqlResults:
        nlist = [i for i in curResult if None is not i]
        # Check if its mapped with both tvdb and tvrage.
        if 4 <= len(nlist):
            logger.log(u"Found indexer mapping in cache for show: " + showObj.name, logger.DEBUG)
            mapped[int(curResult['mindexer'])] = int(curResult['mindexer_id'])
            break

    else:
        sql_l = []
        for indexer in sickbeard.indexerApi().indexers:
            if indexer == showObj.indexer:
                mapped[indexer] = showObj.indexerid
                continue

            lINDEXER_API_PARMS = sickbeard.indexerApi(indexer).api_params.copy()
            lINDEXER_API_PARMS['custom_ui'] = classes.ShowListUI
            t = sickbeard.indexerApi(indexer).indexer(**lINDEXER_API_PARMS)

            try:
                mapped_show = t[showObj.name]
            except sickbeard.indexer_shownotfound:
                logger.log(u"Unable to map " + sickbeard.indexerApi(showObj.indexer).name + "->" + sickbeard.indexerApi(
                    indexer).name + " for show: " + showObj.name + ", skipping it", logger.DEBUG)
                continue

            if mapped_show and len(mapped_show) == 1:
                logger.log(u"Mapping " + sickbeard.indexerApi(showObj.indexer).name + "->" + sickbeard.indexerApi(
                    indexer).name + " for show: " + showObj.name, logger.DEBUG)

                mapped[indexer] = int(mapped_show[0]['id'])

                logger.log(u"Adding indexer mapping to DB for show: " + showObj.name, logger.DEBUG)

                sql_l.append([
                    "INSERT OR IGNORE INTO indexer_mapping (indexer_id, indexer, mindexer_id, mindexer) VALUES (?,?,?,?)",
                    [showObj.indexerid, showObj.indexer, int(mapped_show[0]['id']), indexer]])

        if len(sql_l) > 0:
            myDB = db.DBConnection()
            myDB.mass_action(sql_l)

    return mapped


def touchFile(fname, atime=None):
    if None != atime:
        try:
@@ -1102,7 +1071,7 @@ def proxy_setting(proxy_setting, request_url, force=False):
    return (False, proxy_address)[request_url_match], True


def getURL(url, post_data=None, params=None, headers=None, timeout=30, session=None, json=False, **kwargs):
def getURL(url, post_data=None, params=None, headers=None, timeout=30, session=None, json=False, raise_status_code=False, **kwargs):
    """
    Returns a byte-string retrieved from the url provider.
    """

@@ -1170,6 +1139,9 @@ def getURL(url, post_data=None, params=None, headers=None, timeout=30, session=N
            url = urlparse.urlunparse(parsed)
            resp = session.get(url, timeout=timeout, **kwargs)

        if raise_status_code:
            resp.raise_for_status()

        if not resp.ok:
            http_err_text = 'CloudFlare Ray ID' in resp.content and 'CloudFlare reports, "Website is offline"; ' or ''
            if resp.status_code in clients.http_error_code:

@@ -1183,6 +1155,8 @@ def getURL(url, post_data=None, params=None, headers=None, timeout=30, session=N
            return

    except requests.exceptions.HTTPError as e:
        if raise_status_code:
            resp.raise_for_status()
        logger.log(u'HTTP error %s while loading URL%s' % (
            e.errno, _maybe_request_url(e)), logger.WARNING)
        return

@@ -1479,3 +1453,5 @@ def has_anime():
def cpu_sleep():
    if cpu_presets[sickbeard.CPU_PRESET]:
        time.sleep(cpu_presets[sickbeard.CPU_PRESET])
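The new raise_status_code flag lets a caller of helpers.getURL() see HTTP failures instead of a logged None return; a hedged example (the URL is illustrative only):

    import requests

    try:
        data = getURL('https://indexer.example/api?t=caps', raise_status_code=True)
    except requests.exceptions.HTTPError:
        data = None  # caller decides how to degrade, e.g. retry the caps request with an apikey

The id lookups in the new indexermapper module below pass raise_status_code=True for exactly this reason: to tell a dead endpoint apart from an empty result.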
427  sickbeard/indexermapper.py  (new file)
@@ -0,0 +1,427 @@
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.

import datetime
import re
import traceback

import requests
import sickbeard
from collections import OrderedDict
from urllib import urlencode
from lib.dateutil.parser import parse
from lib.unidecode import unidecode
from libtrakt import TraktAPI
from libtrakt.exceptions import TraktAuthException, TraktException
from sickbeard import db, logger
from sickbeard.helpers import tryInt, getURL
from sickbeard.indexers.indexer_config import (INDEXER_TVDB, INDEXER_TVRAGE, INDEXER_TVMAZE,
                                               INDEXER_IMDB, INDEXER_TRAKT, INDEXER_TMDB)
from lib.tmdb_api import TMDB
from lib.imdb import IMDb

defunct_indexer = []
indexer_list = []
tmdb_ids = {INDEXER_TVDB: 'tvdb_id', INDEXER_IMDB: 'imdb_id', INDEXER_TVRAGE: 'tvrage_id'}


class NewIdDict(dict):
    def __init__(self, *args, **kwargs):
        super(NewIdDict, self).__init__(*args, **kwargs)

    @staticmethod
    def set_value(value, old_value=None):
        if old_value is MapStatus.MISMATCH or (0 < value and old_value not in [None, value] and 0 < old_value):
            return MapStatus.MISMATCH
        return value

    @staticmethod
    def get_value(value):
        if value in [None, 0]:
            return MapStatus.NOT_FOUND
        return value

    def __getitem__(self, key):
        return self.get_value(super(NewIdDict, self).get(key))

    def get(self, key, default=None):
        return self.get_value(super(NewIdDict, self).get(key, default))

    def __setitem__(self, key, value):
        super(NewIdDict, self).__setitem__(key, self.set_value(value, self.get(key)))

    def update(self, other=None, **kwargs):
        if isinstance(other, dict):
            other = {o: self.set_value(v, self.get(o)) for o, v in other.iteritems()}
        super(NewIdDict, self).update(other, **kwargs)


class TvmazeDict(OrderedDict):
    tvmaze_ids = {INDEXER_TVDB: 'thetvdb', INDEXER_IMDB: 'imdb', INDEXER_TVRAGE: 'tvrage'}

    def __init__(self, *args, **kwds):
        super(TvmazeDict, self).__init__(*args, **kwds)

    def get_url(self, key):
        if INDEXER_TVMAZE == key:
            return '%sshows/%s' % (sickbeard.indexerApi(INDEXER_TVMAZE).config['base_url'], self[key])
        return '%slookup/shows?%s=%s%s' % (sickbeard.indexerApi(INDEXER_TVMAZE).config['base_url'],
                                           self.tvmaze_ids[key], ('', 'tt')[key == INDEXER_IMDB],
                                           (self[key], '%07d' % self[key])[key == INDEXER_IMDB])


class TraktDict(OrderedDict):
    trakt_ids = {INDEXER_TVDB: 'tvdb', INDEXER_IMDB: 'imdb', INDEXER_TVRAGE: 'tvrage'}

    def __init__(self, *args, **kwds):
        super(TraktDict, self).__init__(*args, **kwds)

    def get_url(self, key):
        return 'search/%s/%s%s?type=show' % (self.trakt_ids[key], ('', 'tt')[key == INDEXER_IMDB],
                                             (self[key], '%07d' % self[key])[key == INDEXER_IMDB])


def get_tvmaze_ids(url_tvmaze):
    ids = {}
    for url_key in url_tvmaze.iterkeys():
        try:
            res = getURL(url=url_tvmaze.get_url(url_key), json=True, raise_status_code=True, timeout=120)
            if res and 'externals' in res:
                ids[INDEXER_TVRAGE] = res['externals'].get('tvrage', 0)
                ids[INDEXER_TVDB] = res['externals'].get('thetvdb', 0)
                ids[INDEXER_IMDB] = tryInt(str(res['externals'].get('imdb')).replace('tt', ''))
                ids[INDEXER_TVMAZE] = res.get('id', 0)
                break
        except (requests.HTTPError, Exception):
            pass
    return {k: v for k, v in ids.iteritems() if v not in (None, '', 0)}


def get_premieredate(show):
    try:
        first_ep = show.getEpisode(season=1, episode=1)
        if first_ep and first_ep.airdate:
            return first_ep.airdate
    except (StandardError, Exception):
        pass
    return None


def clean_show_name(showname):
    return re.sub(r'[(\s]*(?:19|20)\d\d[)\s]*$', '', isinstance(showname, unicode) and unidecode(showname) or showname)


def get_tvmaze_by_name(showname, premiere_date):
    ids = {}
    try:
        url = '%ssearch/shows?%s' % (sickbeard.indexerApi(INDEXER_TVMAZE).config['base_url'],
                                     urlencode({'q': clean_show_name(showname)}))
        res = getURL(url=url, json=True, raise_status_code=True, timeout=120)
        if res:
            for r in res:
                if 'show' in r and 'premiered' in r['show'] and 'externals' in r['show']:
                    premiered = parse(r['show']['premiered'], fuzzy=True)
                    if abs(premiere_date - premiered.date()) < datetime.timedelta(days=2):
                        ids[INDEXER_TVRAGE] = r['show']['externals'].get('tvrage', 0)
                        ids[INDEXER_TVDB] = r['show']['externals'].get('thetvdb', 0)
                        ids[INDEXER_IMDB] = tryInt(str(r['show']['externals'].get('imdb')).replace('tt', ''))
                        ids[INDEXER_TVMAZE] = r['show'].get('id', 0)
                        break
    except (StandardError, Exception):
        pass
    return {k: v for k, v in ids.iteritems() if v not in (None, '', 0)}


def get_trakt_ids(url_trakt):
    ids = {}
    for url_key in url_trakt.iterkeys():
        try:
            res = TraktAPI().trakt_request(url_trakt.get_url(url_key))
            if res:
                found = False
                for r in res:
                    if r.get('type', '') == 'show' and 'show' in r and 'ids' in r['show']:
                        ids[INDEXER_TVDB] = tryInt(r['show']['ids'].get('tvdb', 0))
                        ids[INDEXER_TVRAGE] = tryInt(r['show']['ids'].get('tvrage', 0))
                        ids[INDEXER_IMDB] = tryInt(str(r['show']['ids'].get('imdb')).replace('tt', ''))
                        ids[INDEXER_TRAKT] = tryInt(r['show']['ids'].get('trakt', 0))
                        ids[INDEXER_TMDB] = tryInt(r['show']['ids'].get('tmdb', 0))
                        found = True
                        break
                if found:
                    break
        except (TraktAuthException, TraktException, IndexError, KeyError):
            pass
    return {k: v for k, v in ids.iteritems() if v not in (None, '', 0)}


def get_imdbid_by_name(name, startyear):
    ids = {}
    try:
        res = IMDb().search_movie(title=name)
        for r in res:
            if hasattr(r, 'movieID') and hasattr(r, 'data') and 'kind' in r.data and r.data['kind'] == 'tv series' \
                    and 'year' in r.data and r.data['year'] == startyear:
                ids[INDEXER_IMDB] = tryInt(r.movieID)
    except (StandardError, Exception):
        pass
    return {k: v for k, v in ids.iteritems() if v not in (None, '', 0)}


def map_indexers_to_show(show_obj, update=False, force=False, recheck=False):
    """

    :return: mapped ids
    :rtype: dict
    :param show_obj: TVShow Object
    :param update: add missing + previously not found ids
    :param force: search for and replace all mapped/missing ids (excluding NO_AUTOMATIC_CHANGE flagged)
    :param recheck: load all ids, don't remove existing
    """
    mapped = {}

    # init mapped indexers object
    for indexer in indexer_list:
        mapped[indexer] = {'id': (0, show_obj.indexerid)[int(indexer) == int(show_obj.indexer)],
                           'status': (MapStatus.NONE, MapStatus.SOURCE)[int(indexer) == int(show_obj.indexer)],
                           'date': datetime.date.fromordinal(1)}

    my_db = db.DBConnection()
    sql_results = my_db.select('SELECT' + ' * FROM indexer_mapping WHERE indexer_id = ? AND indexer = ?',
                               [show_obj.indexerid, show_obj.indexer])

    # for each mapped entry
    for curResult in sql_results:
        date = tryInt(curResult['date'])
        mapped[int(curResult['mindexer'])] = {'status': int(curResult['status']),
                                              'id': int(curResult['mindexer_id']),
                                              'date': datetime.date.fromordinal(date if 0 < date else 1)}

    # get list of needed ids
    mis_map = [k for k, v in mapped.iteritems() if (v['status'] not in [
        MapStatus.NO_AUTOMATIC_CHANGE, MapStatus.SOURCE])
        and ((0 == v['id'] and MapStatus.NONE == v['status'])
             or force or recheck or (update and 0 == v['id'] and k not in defunct_indexer))]
    if mis_map:
        url_tvmaze = TvmazeDict()
        url_trakt = TraktDict()
        if show_obj.indexer == INDEXER_TVDB or show_obj.indexer == INDEXER_TVRAGE:
            url_tvmaze[show_obj.indexer] = show_obj.indexerid
            url_trakt[show_obj.indexer] = show_obj.indexerid
        elif show_obj.indexer == INDEXER_TVMAZE:
            url_tvmaze[INDEXER_TVMAZE] = show_obj.indexerid
        if show_obj.imdbid and re.search(r'\d+$', show_obj.imdbid):
            url_tvmaze[INDEXER_IMDB] = tryInt(re.search(r'(?:tt)?(\d+)', show_obj.imdbid).group(1))
            url_trakt[INDEXER_IMDB] = tryInt(re.search(r'(?:tt)?(\d+)', show_obj.imdbid).group(1))
        for m, v in mapped.iteritems():
            if m != show_obj.indexer and m in [INDEXER_TVDB, INDEXER_TVRAGE, INDEXER_TVRAGE, INDEXER_IMDB] and \
                    0 < v.get('id', 0):
                url_tvmaze[m] = v['id']

        new_ids = NewIdDict()

        if isinstance(show_obj.imdbid, basestring) and re.search(r'\d+$', show_obj.imdbid):
            new_ids[INDEXER_IMDB] = tryInt(re.search(r'(?:tt)?(\d+)', show_obj.imdbid).group(1))

        if 0 < len(url_tvmaze):
            new_ids.update(get_tvmaze_ids(url_tvmaze))

        for m, v in new_ids.iteritems():
            if m != show_obj.indexer and m in [INDEXER_TVDB, INDEXER_TVRAGE, INDEXER_TVRAGE, INDEXER_IMDB] and 0 < v:
                url_trakt[m] = v

        if url_trakt:
            new_ids.update(get_trakt_ids(url_trakt))

        if INDEXER_TVMAZE not in new_ids:
            new_url_tvmaze = TvmazeDict()
            for k, v in new_ids.iteritems():
                if k != show_obj.indexer and k in [INDEXER_TVDB, INDEXER_TVRAGE, INDEXER_TVRAGE, INDEXER_IMDB] \
                        and 0 < v and k not in url_tvmaze:
                    new_url_tvmaze[k] = v

            if 0 < len(new_url_tvmaze):
                new_ids.update(get_tvmaze_ids(new_url_tvmaze))

        if INDEXER_TVMAZE not in new_ids:
            f_date = get_premieredate(show_obj)
            if f_date and f_date is not datetime.date.fromordinal(1):
                tvids = {k: v for k, v in get_tvmaze_by_name(show_obj.name, f_date).iteritems() if k == INDEXER_TVMAZE
                         or k not in new_ids or new_ids.get(k) in (None, 0, '', MapStatus.NOT_FOUND)}
                new_ids.update(tvids)

        if INDEXER_TRAKT not in new_ids:
            new_url_trakt = TraktDict()
            for k, v in new_ids.iteritems():
                if k != show_obj.indexer and k in [INDEXER_TVDB, INDEXER_TVRAGE, INDEXER_IMDB] and 0 < v \
                        and k not in url_trakt:
                    new_url_trakt[k] = v

            if 0 < len(new_url_trakt):
                new_ids.update(get_trakt_ids(new_url_trakt))

        if INDEXER_IMDB not in new_ids:
            new_ids.update(get_imdbid_by_name(show_obj.name, show_obj.startyear))

        if INDEXER_TMDB in mis_map \
                and (None is new_ids.get(INDEXER_TMDB) or MapStatus.NOT_FOUND == new_ids.get(INDEXER_TMDB)) \
                and (0 < mapped.get(INDEXER_TVDB, {'id': 0}).get('id', 0) or 0 < new_ids.get(INDEXER_TVDB, 0)
                     or 0 < mapped.get(INDEXER_IMDB, {'id': 0}).get('id', 0) or 0 < new_ids.get(INDEXER_TMDB, 0)
                     or 0 < mapped.get(INDEXER_TVRAGE, {'id': 0}).get('id', 0) or 0 < new_ids.get(INDEXER_TVRAGE, 0)):
            try:
                tmdb = TMDB(sickbeard.TMDB_API_KEY)
                for d in [INDEXER_TVDB, INDEXER_IMDB, INDEXER_TVRAGE]:
                    c = (new_ids.get(d), mapped.get(d, {'id': 0}).get('id'))[0 < mapped.get(d, {'id': 0}).get('id', 0)]
                    if 0 >= c:
                        continue
                    if INDEXER_IMDB == d:
                        c = 'tt%07d' % c
                    if None is not c and 0 < c:
                        tmdb_data = tmdb.Find(c).info({'external_source': tmdb_ids[d]})
                        if isinstance(tmdb_data, dict) \
                                and 'tv_results' in tmdb_data and 0 < len(tmdb_data['tv_results']) \
                                and 'id' in tmdb_data['tv_results'][0] and 0 < tryInt(tmdb_data['tv_results'][0]['id']):
                            new_ids[INDEXER_TMDB] = tryInt(tmdb_data['tv_results'][0]['id'])
                            break
            except (StandardError, Exception):
                pass

        if INDEXER_TMDB not in new_ids:
            try:
                tmdb = TMDB(sickbeard.TMDB_API_KEY)
                tmdb_data = tmdb.Search().tv(params={'query': clean_show_name(show_obj.name),
                                                     'first_air_date_year': show_obj.startyear})
                for s in tmdb_data.get('results'):
                    if clean_show_name(s['name']) == clean_show_name(show_obj.name):
                        new_ids[INDEXER_TMDB] = tryInt(s['id'])
                        break
            except (StandardError, Exception):
                pass

        for i in indexer_list:
            if i != show_obj.indexer and i in mis_map and 0 != new_ids.get(i, 0):
                if 0 > new_ids[i]:
                    mapped[i] = {'status': new_ids[i], 'id': 0}
                elif force or not recheck or 0 >= mapped.get(i, {'id': 0}).get('id', 0):
                    mapped[i] = {'status': MapStatus.NONE, 'id': new_ids[i]}

        if [k for k in mis_map if 0 != mapped.get(k, {'id': 0, 'status': 0})['id'] or
                mapped.get(k, {'id': 0, 'status': 0})['status'] not in [MapStatus.NONE, MapStatus.SOURCE]]:
            sql_l = []
            today = datetime.date.today()
            date = today.toordinal()
            for indexer in indexer_list:

                if show_obj.indexer == indexer or indexer not in mis_map:
                    continue

                if 0 != mapped[indexer]['id'] or MapStatus.NONE != mapped[indexer]['status']:
                    mapped[indexer]['date'] = today
                    sql_l.append([
                        'INSERT OR REPLACE INTO indexer_mapping (' +
                        'indexer_id, indexer, mindexer_id, mindexer, date, status) VALUES (?,?,?,?,?,?)',
                        [show_obj.indexerid, show_obj.indexer, mapped[indexer]['id'],
                         indexer, date, mapped[indexer]['status']]])
                else:
                    sql_l.append([
                        'DELETE' + ' FROM indexer_mapping WHERE indexer_id = ? AND indexer = ? AND mindexer = ?',
                        [show_obj.indexerid, show_obj.indexer, indexer]])

            if 0 < len(sql_l):
                logger.log('Adding indexer mapping to DB for show: %s' % show_obj.name, logger.DEBUG)
                my_db = db.DBConnection()
                my_db.mass_action(sql_l)

    show_obj.ids = mapped
    return mapped


def save_mapping(show_obj, save_map=None):
    sql_l = []
    today = datetime.date.today()
    date = today.toordinal()
    for indexer in indexer_list:

        if show_obj.indexer == indexer or (isinstance(save_map, list) and indexer not in save_map):
            continue

        if 0 != show_obj.ids[indexer]['id'] or MapStatus.NONE != show_obj.ids[indexer]['status']:
            show_obj.ids[indexer]['date'] = today
            sql_l.append([
                'INSERT OR REPLACE INTO indexer_mapping (' +
                'indexer_id, indexer, mindexer_id, mindexer, date, status) VALUES (?,?,?,?,?,?)',
                [show_obj.indexerid, show_obj.indexer, show_obj.ids[indexer]['id'],
                 indexer, date, show_obj.ids[indexer]['status']]])
        else:
            sql_l.append([
                'DELETE' + ' FROM indexer_mapping WHERE indexer_id = ? AND indexer = ? AND mindexer = ?',
                [show_obj.indexerid, show_obj.indexer, indexer]])

    if 0 < len(sql_l):
        logger.log('Saving indexer mapping to DB for show: %s' % show_obj.name, logger.DEBUG)
        my_db = db.DBConnection()
        my_db.mass_action(sql_l)


def del_mapping(indexer, indexerid):
    my_db = db.DBConnection()
    my_db.action('DELETE' + ' FROM indexer_mapping WHERE indexer_id = ? AND indexer = ?', [indexerid, indexer])


def should_recheck_update_ids(show):
    try:
        today = datetime.date.today()
        ids_updated = min([v.get('date') for k, v in show.ids.iteritems() if k != show.indexer and
                           k not in defunct_indexer] or [datetime.date.fromtimestamp(1)])
        if today - ids_updated >= datetime.timedelta(days=365):
            return True
        first_ep = show.getEpisode(season=1, episode=1)
        if first_ep and first_ep.airdate and first_ep.airdate > datetime.date.fromtimestamp(1):
            show_age = (today - first_ep.airdate).days
            for d in [365, 270, 180, 135, 90, 60, 30, 16, 9] + range(4, -4, -1):
                if d <= show_age:
                    return ids_updated < (first_ep.airdate + datetime.timedelta(days=d))
    except (StandardError, Exception):
        pass
    return False


def load_mapped_ids(**kwargs):
    logger.log('Start loading Indexer mappings...')
    for s in sickbeard.showList:
        with s.lock:
            n_kargs = kwargs.copy()
            if 'update' in kwargs and should_recheck_update_ids(s):
                n_kargs['recheck'] = True
            try:
                s.ids = sickbeard.indexermapper.map_indexers_to_show(s, **n_kargs)
            except (StandardError, Exception):
                logger.log('Error loading mapped id\'s for show: %s' % s.name, logger.ERROR)
                logger.log('Traceback: %s' % traceback.format_exc(), logger.ERROR)
    logger.log('Indexer mappings loaded')


class MapStatus:
    def __init__(self):
        pass

    SOURCE = 1
    NONE = 0
    NOT_FOUND = -1
    MISMATCH = -2
    NO_AUTOMATIC_CHANGE = -100

    allstatus = [SOURCE, NONE, NOT_FOUND, MISMATCH, NO_AUTOMATIC_CHANGE]
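A self-contained check of the NewIdDict merge rule above (requires the constants and classes from this new module in scope; the id values are illustrative): once two sources disagree on a non-zero id, the slot is pinned to MapStatus.MISMATCH, and a slot that was never set reads back as MapStatus.NOT_FOUND:

    ids = NewIdDict()
    ids[INDEXER_TVDB] = 73739   # first source sets the id
    ids[INDEXER_TVDB] = 73739   # the same value from a second source keeps it
    ids[INDEXER_TVDB] = 81189   # a conflicting value flags a mismatch
    assert MapStatus.MISMATCH == ids[INDEXER_TVDB]
    assert MapStatus.NOT_FOUND == ids.get(INDEXER_TVMAZE)  # never set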
@@ -103,9 +103,12 @@ class indexerApi(object):
    def api_params(self):
        if self.indexerID:
            if sickbeard.CACHE_DIR:
                indexerConfig[self.indexerID]['api_params']['cache'] = os.path.join(sickbeard.CACHE_DIR, 'indexers', self.name)
                indexerConfig[self.indexerID]['api_params']['cache'] = os.path.join(
                    sickbeard.CACHE_DIR, 'indexers', self.name)
            if sickbeard.PROXY_SETTING and sickbeard.PROXY_INDEXERS:
                (proxy_address, pac_found) = proxy_setting(sickbeard.PROXY_SETTING, indexerConfig[self.indexerID]['base_url'], force=True)
                (proxy_address, pac_found) = proxy_setting(sickbeard.PROXY_SETTING,
                                                           indexerConfig[self.indexerID]['base_url'],
                                                           force=True)
                if proxy_address:
                    indexerConfig[self.indexerID]['api_params']['proxy'] = proxy_address


@@ -118,8 +121,15 @@ class indexerApi(object):

    @property
    def indexers(self):
        return dict((int(x['id']), x['name']) for x in indexerConfig.values() if not x['mapped_only'])

    @property
    def all_indexers(self):
        """
        return all indexers including mapped only indexers
        """
        return dict((int(x['id']), x['name']) for x in indexerConfig.values())


def get_xem_supported_indexers():
    return dict((key, value) for (key, value) in indexerConfig.items() if value['xem_origin'])
    @property
    def xem_supported_indexers(self):
        return dict((int(x['id']), x['name']) for x in indexerConfig.values() if x.get('xem_origin'))
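Usage sketch for the split between the two properties (the values in comments follow the indexer_config changes below):

    searchable = sickbeard.indexerApi().indexers      # full info sources only, e.g. {1: 'TheTVDB', 2: 'TVRage'}
    everything = sickbeard.indexerApi().all_indexers  # adds mapped-only sources: 100 IMDb, 101 Trakt, 102 TMDb
    mapped_only = set(everything) - set(searchable)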
@@ -3,51 +3,134 @@ from lib.tvrage_api.tvrage_api import TVRage

INDEXER_TVDB = 1
INDEXER_TVRAGE = 2
INDEXER_TVMAZE = 3

initConfig = {}
indexerConfig = {}
# mapped only indexer
INDEXER_IMDB = 100
INDEXER_TRAKT = 101
INDEXER_TMDB = 102
# end mapped only indexer

initConfig['valid_languages'] = [
    "da", "fi", "nl", "de", "it", "es", "fr", "pl", "hu", "el", "tr",
    "ru", "he", "ja", "pt", "zh", "cs", "sl", "hr", "ko", "en", "sv", "no"]
initConfig = {
    'valid_languages': ['da', 'fi', 'nl', 'de', 'it', 'es', 'fr', 'pl', 'hu', 'el', 'tr',
                        'ru', 'he', 'ja', 'pt', 'zh', 'cs', 'sl', 'hr', 'ko', 'en', 'sv', 'no'],
    'langabbv_to_id': dict(el=20, en=7, zh=27, it=15, cs=28, es=16, ru=22, nl=13, pt=26, no=9, tr=21, pl=18,
                           fr=17, hr=31, de=14, da=10, fi=11, hu=19, ja=25, he=24, ko=32, sv=8, sl=30)}

initConfig['langabbv_to_id'] = {
    'el': 20, 'en': 7, 'zh': 27,
    'it': 15, 'cs': 28, 'es': 16, 'ru': 22, 'nl': 13, 'pt': 26, 'no': 9,
    'tr': 21, 'pl': 18, 'fr': 17, 'hr': 31, 'de': 14, 'da': 10, 'fi': 11,
    'hu': 19, 'ja': 25, 'he': 24, 'ko': 32, 'sv': 8, 'sl': 30}

indexerConfig[INDEXER_TVDB] = {
    'id': INDEXER_TVDB,
    'name': 'theTVDB',
    'module': Tvdb,
    'api_params': {'apikey': 'F9C450E78D99172E',
                   'language': 'en',
                   'useZip': True,
                   },
    'active': True,
indexerConfig = {
    INDEXER_TVDB: dict(
        main_url='https://thetvdb.com/',
        id=INDEXER_TVDB,
        name='TheTVDB',
        module=Tvdb,
        api_params=dict(apikey='F9C450E78D99172E', language='en', useZip=True),
        active=True,
        dupekey='',
        mapped_only=False,
        icon='thetvdb16.png',
    ),
    INDEXER_TVRAGE: dict(
        main_url='http://tvrage.com/',
        id=INDEXER_TVRAGE,
        name='TVRage',
        module=TVRage,
        api_params=dict(apikey='Uhewg1Rr0o62fvZvUIZt', language='en'),
        active=False,
        dupekey='tvr',
        mapped_only=False,
        icon='tvrage16.png',
    ),
    INDEXER_TVMAZE: dict(
        main_url='http://www.tvmaze.com/',
        id=INDEXER_TVMAZE,
        name='TVmaze',
        module=None,
        api_params={},
        active=False,
        dupekey='tvm',
        mapped_only=True,
        icon='tvmaze16.png',
    ),
    INDEXER_IMDB: dict(
        main_url='https://www.imdb.com/',
        id=INDEXER_IMDB,
        name='IMDb',
        module=None,
        api_params={},
        active=False,
        dupekey='imdb',
        mapped_only=True,
        icon='imdb16.png',
    ),
    INDEXER_TRAKT: dict(
        main_url='https://www.trakt.tv/',
        id=INDEXER_TRAKT,
        name='Trakt',
        module=None,
        api_params={},
        active=False,
        dupekey='trakt',
        mapped_only=True,
        icon='trakt16.png',
    ),
    INDEXER_TMDB: dict(
        main_url='https://www.themoviedb.org/',
        id=INDEXER_TMDB,
        name='TMDb',
        module=None,
        api_params={},
        active=False,
        dupekey='tmdb',
        mapped_only=True,
        icon='tmdb16.png',
    )
}

indexerConfig[INDEXER_TVRAGE] = {
    'id': INDEXER_TVRAGE,
    'name': 'TVRage',
    'module': TVRage,
    'api_params': {'apikey': 'Uhewg1Rr0o62fvZvUIZt',
                   'language': 'en',
                   },
    'active': False,
}
info_src = INDEXER_TVDB
indexerConfig[info_src].update(dict(
    base_url=(indexerConfig[info_src]['main_url'] +
              'api/%(apikey)s/series/' % indexerConfig[info_src]['api_params']),
    show_url='%s?tab=series&id=' % indexerConfig[info_src]['main_url'],
    finder=(indexerConfig[info_src]['main_url'] +
            'index.php?fieldlocation=2&language=7&order=translation&searching=Search&tab=advancedsearch&seriesname=%s'),
    scene_url='https://midgetspy.github.io/sb_tvdb_scene_exceptions/exceptions.txt',
    xem_origin='tvdb',
))

# TVDB Indexer Settings
indexerConfig[INDEXER_TVDB]['xem_origin'] = 'tvdb'
indexerConfig[INDEXER_TVDB]['icon'] = 'thetvdb16.png'
indexerConfig[INDEXER_TVDB]['scene_url'] = 'http://midgetspy.github.io/sb_tvdb_scene_exceptions/exceptions.txt'
indexerConfig[INDEXER_TVDB]['show_url'] = 'http://thetvdb.com/?tab=series&id='
indexerConfig[INDEXER_TVDB]['base_url'] = 'http://thetvdb.com/api/%(apikey)s/series/' % indexerConfig[INDEXER_TVDB]['api_params']
info_src = INDEXER_TVRAGE
indexerConfig[info_src].update(dict(
    base_url=(indexerConfig[info_src]['main_url'] +
              'showinfo.php?key=%(apikey)s&sid=' % indexerConfig[info_src]['api_params']),
    show_url='%sshows/id-' % indexerConfig[info_src]['main_url'],
    scene_url='https://sickgear.github.io/sg_tvrage_scene_exceptions/exceptions.txt',
    xem_origin='rage',
    defunct=True,
))

# TVRAGE Indexer Settings
indexerConfig[INDEXER_TVRAGE]['xem_origin'] = 'rage'
indexerConfig[INDEXER_TVRAGE]['icon'] = 'tvrage16.png'
indexerConfig[INDEXER_TVRAGE]['scene_url'] = 'https://sickgear.github.io/sg_tvrage_scene_exceptions/exceptions.txt'
indexerConfig[INDEXER_TVRAGE]['show_url'] = 'http://tvrage.com/shows/id-'
indexerConfig[INDEXER_TVRAGE]['base_url'] = 'http://tvrage.com/showinfo.php?key=%(apikey)s&sid=' % indexerConfig[INDEXER_TVRAGE]['api_params']
info_src = INDEXER_TVMAZE
indexerConfig[info_src].update(dict(
    base_url='http://api.tvmaze.com/',
    show_url='%sshows/' % indexerConfig[info_src]['main_url'],
    finder='%ssearch?q=%s' % (indexerConfig[info_src]['main_url'], '%s'),
))

info_src = INDEXER_IMDB
indexerConfig[info_src].update(dict(
    base_url=indexerConfig[info_src]['main_url'],
    show_url='%stitle/tt' % indexerConfig[info_src]['main_url'],
    finder='%sfind?q=%s&s=tt&ttype=tv&ref_=fn_tv' % (indexerConfig[info_src]['main_url'], '%s'),
))

info_src = INDEXER_TRAKT
indexerConfig[info_src].update(dict(
    base_url=indexerConfig[info_src]['main_url'],
    show_url='%sshows/' % indexerConfig[info_src]['main_url'],
    finder='%ssearch/shows?query=%s' % (indexerConfig[info_src]['main_url'], '%s'),
))

info_src = INDEXER_TMDB
indexerConfig[info_src].update(dict(
    base_url=indexerConfig[info_src]['main_url'],
    show_url='%stv/' % indexerConfig[info_src]['main_url'],
    finder='%ssearch/tv?query=%s' % (indexerConfig[info_src]['main_url'], '%s'),
))
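With the consolidated config above, display links become plain string composition; for example (the id is illustrative):

    cfg = indexerConfig[INDEXER_TVDB]
    link = '%s%s' % (cfg['show_url'], 73739)  # -> https://thetvdb.com/?tab=series&id=73739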
@@ -74,7 +74,7 @@ def buildNameCache(show=None):
        nameCache = dict(
            (sickbeard.helpers.full_sanitizeSceneName(x.name), [x.indexerid, -1]) for x in sickbeard.showList if x)

        cacheDB = db.DBConnection('cache.db')
        cacheDB = db.DBConnection()

        cache_results = cacheDB.select(
            'SELECT show_name, indexer_id, season FROM scene_exceptions WHERE indexer_id IN (%s)' % ','.join(
@@ -63,8 +63,27 @@ class TraktNotifier:
                ]
            }

            indexer = ('tvrage', 'tvdb')[1 == ep_obj.show.indexer]
            data['shows'][0]['ids'][indexer] = ep_obj.show.indexerid
            from sickbeard.indexers.indexer_config import INDEXER_TVDB, INDEXER_TVRAGE, INDEXER_IMDB, INDEXER_TMDB, \
                INDEXER_TRAKT

            supported_indexer = {INDEXER_TRAKT: 'trakt', INDEXER_TVDB: 'tvdb', INDEXER_TVRAGE: 'tvrage',
                                 INDEXER_IMDB: 'imdb', INDEXER_TMDB: 'tmdb'}
            indexer_priorities = [INDEXER_TRAKT, INDEXER_TVDB, INDEXER_TVRAGE, INDEXER_IMDB, INDEXER_TMDB]

            indexer = indexerid = None
            if ep_obj.show.indexer in supported_indexer:
                indexer, indexerid = supported_indexer[ep_obj.show.indexer], ep_obj.show.indexerid
            else:
                for i in indexer_priorities:
                    if ep_obj.show.ids.get(i, {'id': 0}).get('id', 0) > 0:
                        indexer, indexerid = supported_indexer[i], ep_obj.show.ids[i]['id']
                        break

            if indexer is None or indexerid is None:
                logger.log('Missing trakt supported id, could not add to collection.', logger.WARNING)
                return

            data['shows'][0]['ids'][indexer] = indexerid

            # Add Season and Episode + Related Episodes
            data['shows'][0]['seasons'] = [{'number': ep_obj.season, 'episodes': []}]
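A runnable miniature of the fallback above: a show whose native source is not Trakt-supported can still notify if any related id is usable, tried in priority order (the ids and numbering below are illustrative):

    ids = {101: {'id': 0}, 1: {'id': 73739}}   # no trakt id cached, but a tvdb id exists
    for i in [101, 1, 2, 100, 102]:            # trakt first, then tvdb, tvrage, imdb, tmdb
        if ids.get(i, {'id': 0}).get('id', 0) > 0:
            print('send tvdb id %s to Trakt' % ids[i]['id'])
            break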
@@ -80,10 +80,8 @@ def send_nzb(nzb, proper=False):
    # if it aired recently make it high priority and generate DupeKey/Score
    for curEp in nzb.episodes:
        if '' == dupekey:
            if 1 == curEp.show.indexer:
                dupekey = 'SickGear-%s' % curEp.show.indexerid
            elif 2 == curEp.show.indexer:
                dupekey = 'SickGear-tvr%s' % curEp.show.indexerid
            dupekey = "SickGear-%s%s" % (
                sickbeard.indexerApi(curEp.show.indexer).config.get('dupekey', ''), curEp.show.indexerid)
        dupekey += '-%s.%s' % (curEp.season, curEp.episode)

        if datetime.date.today() - curEp.airdate <= datetime.timedelta(days=7):
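Worked example of the table-driven dupekey: the per-indexer prefix now comes from indexerConfig's dupekey value rather than hard-coded indexer numbers (the sample ids are illustrative):

    dupekeys = {1: '', 2: 'tvr'}  # the TheTVDB and TVRage dupekey values from indexer_config
    for indexer, indexerid in [(1, 73739), (2, 2930)]:
        dupekey = 'SickGear-%s%s' % (dupekeys[indexer], indexerid)
        dupekey += '-%s.%s' % (3, 7)  # season 3, episode 7
        print(dupekey)  # SickGear-73739-3.7 then SickGear-tvr2930-3.7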
@@ -61,7 +61,7 @@ class PostProcessor(object):

    IGNORED_FILESTRINGS = ['/.AppleDouble/', '.DS_Store']

    def __init__(self, file_path, nzb_name=None, process_method=None, force_replace=None, use_trash=None, webhandler=None):
    def __init__(self, file_path, nzb_name=None, process_method=None, force_replace=None, use_trash=None, webhandler=None, showObj=None):
        """
        Creates a new post processor with the given file path and optionally an NZB name.


@@ -89,6 +89,8 @@ class PostProcessor(object):

        self.webhandler = webhandler

        self.showObj = showObj

        self.in_history = False

        self.release_group = None

@@ -475,7 +477,7 @@ class PostProcessor(object):
            return to_return

        # parse the name to break it into show name, season, and episode
        np = NameParser(resource, try_scene_exceptions=True, convert=True)
        np = NameParser(resource, try_scene_exceptions=True, convert=True, showObj=self.showObj)
        parse_result = np.parse(name)
        self._log(u'Parsed %s<br />.. from %s' % (str(parse_result).decode('utf-8', 'xmlcharrefreplace'), name), logger.DEBUG)
@@ -161,7 +161,7 @@ class ProcessTVShow(object):

        return result

    def process_dir(self, dir_name, nzb_name=None, process_method=None, force=False, force_replace=None, failed=False, pp_type='auto', cleanup=False):
    def process_dir(self, dir_name, nzb_name=None, process_method=None, force=False, force_replace=None, failed=False, pp_type='auto', cleanup=False, showObj=None):
        """
        Scans through the files in dir_name and processes whatever media files it finds


@@ -240,7 +240,7 @@ class ProcessTVShow(object):

                # Don't Link media when the media is extracted from a rar in the same path
                if process_method in ('hardlink', 'symlink') and video_in_rar:
                    self._process_media(path, video_in_rar, nzb_name, 'move', force, force_replace)
                    self._process_media(path, video_in_rar, nzb_name, 'move', force, force_replace, showObj=showObj)
                    self._delete_files(path, [ek.ek(os.path.relpath, item, path) for item in work_files], force=True)
                    video_batch = set(video_files) - set(video_in_rar)
                else:

@@ -258,7 +258,7 @@ class ProcessTVShow(object):

                    video_batch = set(video_batch) - set(video_pick)

                    self._process_media(path, video_pick, nzb_name, process_method, force, force_replace, use_trash=cleanup)
                    self._process_media(path, video_pick, nzb_name, process_method, force, force_replace, use_trash=cleanup, showObj=showObj)

            except OSError as e:
                logger.log('Batch skipped, %s%s' %

@@ -289,7 +289,7 @@ class ProcessTVShow(object):

                    # Don't Link media when the media is extracted from a rar in the same path
                    if process_method in ('hardlink', 'symlink') and video_in_rar:
                        self._process_media(walk_path, video_in_rar, nzb_name, 'move', force, force_replace)
                        self._process_media(walk_path, video_in_rar, nzb_name, 'move', force, force_replace, showObj=showObj)
                        video_batch = set(video_files) - set(video_in_rar)
                    else:
                        video_batch = video_files

@@ -307,7 +307,7 @@ class ProcessTVShow(object):

                        video_batch = set(video_batch) - set(video_pick)

                        self._process_media(walk_path, video_pick, nzb_name, process_method, force, force_replace, use_trash=cleanup)
                        self._process_media(walk_path, video_pick, nzb_name, process_method, force, force_replace, use_trash=cleanup, showObj=showObj)

            except OSError as e:
                logger.log('Batch skipped, %s%s' %

@@ -764,7 +764,7 @@ class ProcessTVShow(object):

        return False

    def _process_media(self, process_path, video_files, nzb_name, process_method, force, force_replace, use_trash=False):
    def _process_media(self, process_path, video_files, nzb_name, process_method, force, force_replace, use_trash=False, showObj=None):

        processor = None
        for cur_video_file in video_files:

@@ -776,7 +776,7 @@ class ProcessTVShow(object):
            cur_video_file_path = ek.ek(os.path.join, process_path, cur_video_file)

            try:
                processor = postProcessor.PostProcessor(cur_video_file_path, nzb_name, process_method, force_replace, use_trash=use_trash, webhandler=self.webhandler)
                processor = postProcessor.PostProcessor(cur_video_file_path, nzb_name, process_method, force_replace, use_trash=use_trash, webhandler=self.webhandler, showObj=showObj)
                file_success = processor.process()
                process_fail_message = ''
            except exceptions.PostProcessingFailed:

@@ -850,6 +850,6 @@ class ProcessTVShow(object):


# backward compatibility prevents the case of this function name from being updated to PEP8
def processDir(dir_name, nzb_name=None, process_method=None, force=False, force_replace=None, failed=False, type='auto', cleanup=False, webhandler=None):
def processDir(dir_name, nzb_name=None, process_method=None, force=False, force_replace=None, failed=False, type='auto', cleanup=False, webhandler=None, showObj=None):
    # backward compatibility prevents the case of this function name from being updated to PEP8
    return ProcessTVShow(webhandler).process_dir(dir_name, nzb_name, process_method, force, force_replace, failed, type, cleanup)
    return ProcessTVShow(webhandler).process_dir(dir_name, nzb_name, process_method, force, force_replace, failed, type, cleanup, showObj)
@@ -104,6 +104,7 @@ def _get_proper_list(aired_since_shows, recent_shows, recent_anime):
        name = _generic_name(x.name)
        if name not in propers:
            try:
                np = NameParser(False, try_scene_exceptions=True, showObj=x.parsed_show)
                parse_result = np.parse(x.name)
                if parse_result.series_name and parse_result.episode_numbers and \
                        parse_result.show.indexerid in recent_shows + recent_anime:
@@ -271,7 +271,7 @@ class GenericProvider:
        quality = Quality.sceneQuality(title, anime)
        return quality

    def _search_provider(self, search_params, search_mode='eponly', epcount=0, age=0):
    def _search_provider(self, search_params, search_mode='eponly', epcount=0, age=0, **kwargs):
        return []

    def _season_strings(self, episode):

@@ -340,7 +340,10 @@ class GenericProvider:
                'udp://tracker.opentrackr.org:1337/announce', 'udp://tracker.torrent.eu.org:451/announce',
                'udp://tracker.trackerfix.com:80/announce'])) or None)

    def find_search_results(self, show, episodes, search_mode, manual_search=False):
    def get_show(self, item, **kwargs):
        return None

    def find_search_results(self, show, episodes, search_mode, manual_search=False, **kwargs):

        self._check_auth()
        self.show = show

@@ -377,6 +380,10 @@ class GenericProvider:
            for cur_param in search_params:
                item_list += self._search_provider(cur_param, search_mode=search_mode, epcount=len(episodes))

        return self.finish_find_search_results(show, episodes, search_mode, manual_search, results, item_list)

    def finish_find_search_results(self, show, episodes, search_mode, manual_search, results, item_list, **kwargs):

        # if we found what we needed already from cache then return results and exit
        if len(results) == len(episodes):
            return results

@@ -400,10 +407,10 @@ class GenericProvider:

        # filter results
        cl = []
        parser = NameParser(False, convert=True)
        for item in item_list:
            (title, url) = self._title_and_url(item)

            parser = NameParser(False, showObj=self.get_show(item, **kwargs), convert=True)
            # parse the file name
            try:
                parse_result = parser.parse(title)

@@ -441,8 +448,9 @@ class GenericProvider:
                logger.log(u'The result ' + title + u' doesn\'t seem to be a valid season that we are trying' +
                           u' to snatch, ignoring', logger.DEBUG)
                add_cache_entry = True
            elif len(parse_result.episode_numbers) and not [
                ep for ep in episodes if ep.season == parse_result.season_number and
            elif len(parse_result.episode_numbers)\
                    and not [ep for ep in episodes
                             if ep.season == parse_result.season_number and
                             ep.episode in parse_result.episode_numbers]:
                logger.log(u'The result ' + title + ' doesn\'t seem to be a valid episode that we are trying' +
                           u' to snatch, ignoring', logger.DEBUG)

@@ -713,7 +721,7 @@ class NZBProvider(object, GenericProvider):
    def cache_data(self, *args, **kwargs):

        search_params = {'Cache': [{}]}
        return self._search_provider(search_params)
        return self._search_provider(search_params=search_params, **kwargs)


class TorrentProvider(object, GenericProvider):
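The two new hooks work together: find_search_results() now forwards **kwargs, and get_show() gives a provider a chance to resolve a result item to a show object before name parsing. A standalone sketch of an override (GenericProviderSketch, ExampleProvider and show_hint are invented for illustration):

    class GenericProviderSketch(object):
        def get_show(self, item, **kwargs):
            return None  # default: no show context, the parser falls back to the title

    class ExampleProvider(GenericProviderSketch):
        def get_show(self, item, **kwargs):
            # e.g. resolved from indexer ids carried in the provider's response
            return getattr(item, 'show_hint', None)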
@@ -16,13 +16,75 @@
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.

from __future__ import division

import time

import sickbeard
import datetime
import re
import urllib
from math import ceil

from sickbeard.sbdatetime import sbdatetime
from . import generic
from sickbeard import helpers, logger, scene_exceptions, tvcache
from sickbeard.exceptions import AuthException
from sickbeard import helpers, logger, scene_exceptions, tvcache, classes, db
from sickbeard.common import Quality
from sickbeard.exceptions import AuthException, MultipleShowObjectsException
from sickbeard.indexers.indexer_config import *
from io import BytesIO
from lib.dateutil import parser
from sickbeard.network_timezones import sb_timezone

try:
    from lxml import etree
except ImportError:
    try:
        import xml.etree.cElementTree as etree
    except ImportError:
        import xml.etree.ElementTree as etree


class NewznabConstants:
    SEARCH_TEXT = -100
    SEARCH_SEASON = -101
    SEARCH_EPISODE = -102

    CAT_SD = -200
    CAT_HD = -201
    CAT_UHD = -202
    CAT_HEVC = -203
    CAT_ANIME = -204
    CAT_SPORT = -205

    catSearchStrings = {r'^Anime$': CAT_ANIME,
                        r'^Sport$': CAT_SPORT,
                        r'^SD$': CAT_SD,
                        r'^HD$': CAT_HD,
                        r'^UHD$': CAT_UHD,
                        r'^4K$': CAT_UHD,
                        r'^HEVC$': CAT_HEVC}

    providerToIndexerMapping = {'tvdbid': INDEXER_TVDB,
                                'rageid': INDEXER_TVRAGE,
                                'tvmazeid': INDEXER_TVMAZE,
                                'imdbid': INDEXER_IMDB,
                                'tmdbid': INDEXER_TMDB,
                                'traktid': INDEXER_TRAKT}

    indexer_priority_list = [INDEXER_TVDB, INDEXER_TVMAZE, INDEXER_TVRAGE, INDEXER_TRAKT, INDEXER_TMDB, INDEXER_TMDB]

    searchTypes = {'rid': INDEXER_TVRAGE,
                   'tvdbid': INDEXER_TVDB,
                   'tvmazeid': INDEXER_TVMAZE,
                   'imdbid': INDEXER_IMDB,
                   'tmdbid': INDEXER_TMDB,
                   'traktid': INDEXER_TRAKT,
                   'q': SEARCH_TEXT,
                   'season': SEARCH_SEASON,
                   'ep': SEARCH_EPISODE}

    def __init__(self):
        pass


class NewznabProvider(generic.NZBProvider):
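These tables drive the smarter searches: searchTypes maps the query parameters a server advertises in its caps response to internal indexer ids, so a search can be keyed on a related show id rather than a show-name string. A runnable miniature of that selection (numbers and names are illustrative):

    caps = {1: 'tvdbid', 2: 'rid'}       # internal indexer id -> advertised search param
    show_ids = {1: 73739, 2: 0}          # related ids known for the show
    params = {'t': 'tvsearch'}
    for indexer in [1, 3, 2, 101, 102]:  # the indexer_priority_list order
        if indexer in caps and 0 < show_ids.get(indexer, 0):
            params[caps[indexer]] = show_ids[indexer]
            break
    else:
        params['q'] = 'Show Name'        # no usable id: fall back to text search
    print(params)                        # {'t': 'tvsearch', 'tvdbid': 73739}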
@@ -33,22 +95,182 @@ class NewznabProvider(generic.NZBProvider):

        self.url = url
        self.key = key
        self.cat_ids = cat_ids or '5030,5040'
        self.cat_ids = cat_ids or ''
        self._cat_ids = None
        self.search_mode = search_mode or 'eponly'
        self.search_fallback = search_fallback
        self.enable_recentsearch = enable_recentsearch
        self.enable_backlog = enable_backlog
        self.needs_auth = '0' != self.key.strip()  # '0' in the key setting indicates that api_key is not needed
        self.default = False
        self._caps = {}
        self._caps_cats = {}
        self._caps_all_cats = []
        self._caps_need_apikey = {'need': False, 'date': datetime.date.fromordinal(1)}
        self._limits = 100
        self._last_recent_search = None
        self._caps_last_updated = datetime.datetime.fromordinal(1)
        self.cache = NewznabCache(self)

    @property
    def cat_ids(self):
        return self._cat_ids

    @cat_ids.setter
    def cat_ids(self, cats):
        self._cat_ids = self.clean_newznab_categories(cats)

    @property
    def caps(self):
        self.check_cap_update()
        return self._caps

    @property
    def cats(self):
        self.check_cap_update()
        return self._caps_cats

    @property
    def all_cats(self):
        self.check_cap_update()
        return self._caps_all_cats

    @property
    def limits(self):
        self.check_cap_update()
        return self._limits

    @property
    def last_recent_search(self):
        if not self._last_recent_search:
            try:
                my_db = db.DBConnection('cache.db')
                res = my_db.select('SELECT "datetime" FROM "lastrecentsearch" WHERE "name"=?', [self.get_id()])
                if res:
                    self._last_recent_search = datetime.datetime.fromtimestamp(int(res[0]['datetime']))
            except:
                pass
        return self._last_recent_search

    @last_recent_search.setter
    def last_recent_search(self, value):
        try:
            my_db = db.DBConnection('cache.db')
            my_db.action('INSERT OR REPLACE INTO "lastrecentsearch" (name, datetime) VALUES (?,?)',
                         [self.get_id(), sbdatetime.totimestamp(value, default=0)])
        except:
            pass
        self._last_recent_search = value
    def check_cap_update(self):
        if not self._caps or (datetime.datetime.now() - self._caps_last_updated) >= datetime.timedelta(days=1):
            self.get_caps()

    def _get_caps_data(self):
        xml_caps = None
        if datetime.date.today() - self._caps_need_apikey['date'] > datetime.timedelta(days=30) or \
                not self._caps_need_apikey['need']:
            self._caps_need_apikey['need'] = False
            data = self.get_url('%s/api?t=caps' % self.url)
            if data:
                xml_caps = helpers.parse_xml(data)
        if (xml_caps is None or not hasattr(xml_caps, 'tag') or xml_caps.tag == 'error' or xml_caps.tag != 'caps') and \
                self.maybe_apikey():
            data = self.get_url('%s/api?t=caps&apikey=%s' % (self.url, self.maybe_apikey()))
            if data:
                xml_caps = helpers.parse_xml(data)
                if xml_caps and hasattr(xml_caps, 'tag') and xml_caps.tag == 'caps':
                    self._caps_need_apikey = {'need': True, 'date': datetime.date.today()}
        return xml_caps

    def get_caps(self):
        caps = {}
        cats = {}
        all_cats = []
        xml_caps = self._get_caps_data()
        if None is not xml_caps:
            tv_search = xml_caps.find('.//tv-search')
            if None is not tv_search:
                for c in [i for i in tv_search.get('supportedParams', '').split(',')]:
                    k = NewznabConstants.searchTypes.get(c)
                    if k:
                        caps[k] = c

            limit = xml_caps.find('.//limits')
            if None is not limit:
                l = helpers.tryInt(limit.get('max'), 100)
                self._limits = (100, l)[l >= 100]

            try:
                for category in xml_caps.iter('category'):
                    if 'TV' == category.get('name'):
                        for subcat in category.findall('subcat'):
                            try:
                                cat_name = subcat.attrib['name']
                                cat_id = subcat.attrib['id']
                                all_cats.append({'id': cat_id, 'name': cat_name})
                                for s, v in NewznabConstants.catSearchStrings.iteritems():
                                    if None is not re.search(s, cat_name, re.IGNORECASE):
                                        cats.setdefault(v, []).append(cat_id)
                            except:
                                continue
                    elif category.get('name', '').upper() in ['XXX', 'OTHER', 'MISC']:
                        for subcat in category.findall('subcat'):
                            try:
                                if None is not re.search(r'^Anime$', subcat.attrib['name'], re.IGNORECASE):
                                    cats.setdefault(NewznabConstants.CAT_ANIME, []).append(subcat.attrib['id'])
                                    break
                            except:
                                continue
            except:
                logger.log('Error parsing result for [%s]' % self.name, logger.DEBUG)

        if not caps and self._caps and not all_cats and self._caps_all_cats and not cats and self._caps_cats:
            return

        self._caps_last_updated = datetime.datetime.now()

        if not caps and self.get_id() not in ['sick_beard_index']:
            caps[INDEXER_TVDB] = 'tvdbid'
        if NewznabConstants.SEARCH_TEXT not in caps or not caps.get(NewznabConstants.SEARCH_TEXT):
            caps[NewznabConstants.SEARCH_TEXT] = 'q'
        if NewznabConstants.SEARCH_SEASON not in caps or not caps.get(NewznabConstants.SEARCH_SEASON):
            caps[NewznabConstants.SEARCH_SEASON] = 'season'
        if NewznabConstants.SEARCH_EPISODE not in caps or not caps.get(NewznabConstants.SEARCH_EPISODE):
            caps[NewznabConstants.SEARCH_EPISODE] = 'ep'
        if (INDEXER_TVRAGE not in caps or not caps.get(INDEXER_TVRAGE)) and self.get_id() not in ['sick_beard_index']:
            caps[INDEXER_TVRAGE] = 'rid'

        if NewznabConstants.CAT_HD not in cats or not cats.get(NewznabConstants.CAT_HD):
            cats[NewznabConstants.CAT_HD] = ['5040']
        if NewznabConstants.CAT_SD not in cats or not cats.get(NewznabConstants.CAT_SD):
            cats[NewznabConstants.CAT_SD] = ['5030']
        if NewznabConstants.CAT_ANIME not in cats or not cats.get(NewznabConstants.CAT_ANIME):
            cats[NewznabConstants.CAT_ANIME] = (['5070'], ['6070,7040'])['nzbs_org' == self.get_id()]
        if NewznabConstants.CAT_SPORT not in cats or not cats.get(NewznabConstants.CAT_SPORT):
            cats[NewznabConstants.CAT_SPORT] = ['5060']

        self._caps = caps
        self._caps_cats = cats
        self._caps_all_cats = all_cats
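
For reference, a hypothetical caps payload of the shape get_caps() consumes (real server responses vary by indexer):

    caps_xml = '''<caps>
      <limits max="100" default="100"/>
      <searching>
        <tv-search available="yes" supportedParams="q,rid,tvdbid,season,ep"/>
      </searching>
      <categories>
        <category id="5000" name="TV">
          <subcat id="5030" name="SD"/>
          <subcat id="5040" name="HD"/>
        </category>
      </categories>
    </caps>'''

Parsed, this would yield caps entries for 'q'/'rid'/'tvdbid'/'season'/'ep', cats of {CAT_SD: ['5030'], CAT_HD: ['5040']}, and both subcats in all_cats.
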
    @staticmethod
    def clean_newznab_categories(cats):
        """
        Removes the anime (5070), sports (5060), HD (5040), UHD (5045), SD (5030) categories from the list
        """
        exclude = {'5070', '5060', '5040', '5045', '5030'}
        if isinstance(cats, list):
            return [x for x in cats if x['id'] not in exclude]
        return ','.join(set(cats.split(',')) - exclude)
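
A minimal usage sketch of the helper (not part of the diff; the category values are made up):

    cats_str = '5030,5040,5080'
    assert set(NewznabProvider.clean_newznab_categories(cats_str).split(',')) == {'5080'}
    cats_list = [{'id': '5040', 'name': 'HD'}, {'id': '5080', 'name': 'Documentary'}]
    assert NewznabProvider.clean_newznab_categories(cats_list) == [{'id': '5080', 'name': 'Documentary'}]
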
    def check_auth_from_data(self, data):

        if data is None:
            return self._check_auth()
        if data is None or not hasattr(data, 'tag'):
            return False

        if 'error' in data.feed:
            code = data.feed['error']['code']
        if 'error' == data.tag:
            code = data.get('code', '')

            if '100' == code:
                raise AuthException('Your API key for %s is incorrect, check your config.' % self.name)

@ -57,52 +279,15 @@ class NewznabProvider(generic.NZBProvider):
            elif '102' == code:
                raise AuthException('Your account isn\'t allowed to use the API on %s, contact the admin.' % self.name)
            elif '910' == code:
                logger.log(u'%s currently has their API disabled, please check with provider.' % self.name,
                logger.log('%s currently has their API disabled, please check with provider.' % self.name,
                           logger.WARNING)
            else:
                logger.log(u'Unknown error given from %s: %s' % (self.name, data.feed['error']['description']),
                logger.log('Unknown error given from %s: %s' % (self.name, data.get('description', '')),
                           logger.ERROR)
            return False

        return True
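
Hypothetical error payloads of the kind the branches above handle; the tag and its code attribute drive the outcome:

    import xml.etree.ElementTree as etree
    err = etree.fromstring('<error code="100" description="Incorrect user credentials"/>')
    assert 'error' == err.tag and '100' == err.get('code', '')  # would raise AuthException above
    # code "910" would only log a warning; any other code logs an error and returns False
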
    def get_newznab_categories(self):
        """
        Uses the newznab provider url and apikey to get the capabilities.
        Makes use of the default newznab caps param, e.g. http://yournewznab/api?t=caps&apikey=skdfiw7823sdkdsfjsfk
        Returns a tuple with (success or not, array with dicts [{"id": "5070", "name": "Anime"},
        {"id": "5080", "name": "Documentary"}, {"id": "5020", "name": "Foreign"}...etc], error message)
        """
        return_categories = []

        api_key = self._check_auth()

        params = {'t': 'caps'}
        if isinstance(api_key, basestring):
            params['apikey'] = api_key

        url = '%s/api?%s' % (self.url.strip('/'), '&'.join(['%s=%s' % (k, v) for k, v in params.items()]))
        categories = self.get_url(url, timeout=10)
        if not categories:
            logger.log(u'Error getting html for [%s]' % url, logger.DEBUG)
            return False, return_categories, 'Error getting html for [%s]' % url

        xml_categories = helpers.parse_xml(categories)
        if not xml_categories:
            logger.log(u'Error parsing xml for [%s]' % self.name, logger.DEBUG)
            return False, return_categories, 'Error parsing xml for [%s]' % self.name

        try:
            for category in xml_categories.iter('category'):
                if 'TV' == category.get('name'):
                    for subcat in category.findall('subcat'):
                        return_categories.append(subcat.attrib)
        except:
            logger.log(u'Error parsing result for [%s]' % self.name, logger.DEBUG)
            return False, return_categories, 'Error parsing result for [%s]' % self.name

        return True, return_categories, ''
    def config_str(self):
        return '%s|%s|%s|%s|%i|%s|%i|%i|%i' \
               % (self.name or '', self.url or '', self.maybe_apikey() or '', self.cat_ids or '', self.enabled,

@ -128,16 +313,11 @@ class NewznabProvider(generic.NZBProvider):
            ep_detail = 'S%02d' % helpers.tryInt(base_params['season'], 1)

        # id search
        ids = helpers.mapIndexersToShow(ep_obj.show)
        ids_fail = '6box' in self.name
        if not ids_fail and ids[1]:  # or ids[2]:
            params = base_params.copy()
        use_id = False
        if ids[1] and self.supports_tvdbid():
            params['tvdbid'] = ids[1]
            use_id = True
        if ids[2]:
            params['rid'] = ids[2]
        for i in sickbeard.indexerApi().all_indexers:
            if i in ep_obj.show.ids and 0 < ep_obj.show.ids[i]['id'] and i in self.caps:
                params[self.caps[i]] = ep_obj.show.ids[i]['id']
                use_id = True
        use_id and search_params.append(params)

@ -190,17 +370,11 @@ class NewznabProvider(generic.NZBProvider):
            'episodenumber': helpers.tryInt(base_params['ep'], 1)}

        # id search
        ids = helpers.mapIndexersToShow(ep_obj.show)
        ids_fail = '6box' in self.name
        if not ids_fail and ids[1]:  # or ids[2]:
            params = base_params.copy()
        use_id = False
        if ids[1]:
            if self.supports_tvdbid():
                params['tvdbid'] = ids[1]
                use_id = True
        if ids[2]:
            params['rid'] = ids[2]
        for i in sickbeard.indexerApi().all_indexers:
            if i in ep_obj.show.ids and 0 < ep_obj.show.ids[i]['id'] and i in self.caps:
                params[self.caps[i]] = ep_obj.show.ids[i]['id']
                use_id = True
        use_id and search_params.append(params)

@ -231,47 +405,205 @@ class NewznabProvider(generic.NZBProvider):
        return self.get_id() not in ['sick_beard_index']

    def _search_provider(self, search_params, **kwargs):
    def _title_and_url(self, item):
        title, url = None, None
        try:
            title = item.findtext('title')
            url = item.findtext('link')
        except Exception:
            pass

        title = title and re.sub(r'\s+', '.', '%s' % title)
        url = url and str(url).replace('&amp;', '&')

        return title, url
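
A standalone sketch of the normalisation (hypothetical item; feeds sometimes double-escape ampersands, which is what the replace undoes):

    import re
    import xml.etree.ElementTree as etree
    item = etree.fromstring('<item><title>Some Show 1x02</title>'
                            '<link>https://example.org/get?id=1&amp;amp;r=2</link></item>')
    title = re.sub(r'\s+', '.', item.findtext('title'))     # 'Some.Show.1x02'
    url = str(item.findtext('link')).replace('&amp;', '&')  # 'https://example.org/get?id=1&r=2'
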
    def get_show(self, item, **kwargs):
        show_obj = None
        if 'name_space' in kwargs and 'newznab' in kwargs['name_space']:
            ids = self.cache.parse_ids(item, kwargs['name_space'])

            if ids:
                try:
                    show_obj = helpers.find_show_by_id(sickbeard.showList, id_dict=ids, no_mapped_ids=False)
                except MultipleShowObjectsException:
                    return None
        return show_obj
    def choose_search_mode(self, episodes, ep_obj, hits_per_page=100):
        if not hasattr(ep_obj, 'eps_aired_in_season'):
            return None, True, True, True, hits_per_page
        searches = [e for e in episodes if (not ep_obj.show.is_scene and e.season == ep_obj.season) or
                    (ep_obj.show.is_scene and e.scene_season == ep_obj.scene_season)]
        need_sd = need_hd = need_uhd = False
        max_sd = Quality.SDDVD
        hd_qualities = [Quality.HDTV, Quality.FULLHDTV, Quality.HDWEBDL, Quality.FULLHDWEBDL,
                        Quality.HDBLURAY, Quality.FULLHDBLURAY]
        max_hd = Quality.FULLHDBLURAY
        for s in searches:
            if not s.show.is_anime and not s.show.is_sports:
                if not need_sd and min(s.wantedQuality) <= max_sd:
                    need_sd = True
                if not need_hd and any(i in hd_qualities for i in s.wantedQuality):
                    need_hd = True
                if not need_uhd and max(s.wantedQuality) > max_hd:
                    need_uhd = True
        per_ep, limit_per_ep = 0, 0
        if need_sd and not need_hd:
            per_ep, limit_per_ep = 10, 25
        if need_hd:
            if not need_sd:
                per_ep, limit_per_ep = 30, 90
            else:
                per_ep, limit_per_ep = 40, 120
        if need_uhd or (need_hd and not self.cats.get(NewznabConstants.CAT_UHD)):
            per_ep += 4
            limit_per_ep += 10
        if ep_obj.show.is_anime or ep_obj.show.is_sports or ep_obj.show.air_by_date:
            rel_per_ep, limit_per_ep = 5, 10
        else:
            rel_per_ep = per_ep
        rel = int(ceil((ep_obj.eps_aired_in_scene_season if ep_obj.show.is_scene else
                        ep_obj.eps_aired_in_season) * rel_per_ep / hits_per_page))
        rel_limit = int(ceil((ep_obj.eps_aired_in_scene_season if ep_obj.show.is_scene else
                              ep_obj.eps_aired_in_season) * limit_per_ep / hits_per_page))
        season_search = rel < (len(searches) * 100 // hits_per_page)
        if not season_search:
            need_sd = need_hd = need_uhd = False
            if not ep_obj.show.is_anime and not ep_obj.show.is_sports:
                if min(ep_obj.wantedQuality) <= max_sd:
                    need_sd = True
                if any(i in hd_qualities for i in ep_obj.wantedQuality):
                    need_hd = True
                if max(ep_obj.wantedQuality) > max_hd:
                    need_uhd = True
        return (season_search, need_sd, need_hd, need_uhd,
                (hits_per_page * 100 // hits_per_page * 2, hits_per_page * int(ceil(rel_limit * 1.5)))[season_search])
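
Worked numbers for the request budget above (illustrative only): an HD-only show with 13 episodes aired in the season and a server page size of 100:

    from math import ceil
    eps_aired, per_ep, limit_per_ep, hits_per_page = 13, 30, 90, 100
    rel = int(ceil(eps_aired * per_ep / float(hits_per_page)))              # 4 requests estimated
    rel_limit = int(ceil(eps_aired * limit_per_ep / float(hits_per_page)))  # 12 request ceiling
    season_search = rel < (13 * 100 // hits_per_page)                       # True: one season query is cheaper
    max_items = hits_per_page * int(ceil(rel_limit * 1.5))                  # 1800 item cap for the season search
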
    def find_search_results(self, show, episodes, search_mode, manual_search=False, try_other_searches=False, **kwargs):
        self._check_auth()
        self.show = show

        results = {}
        item_list = []
        name_space = {}

        searched_scene_season = s_mode = None
        for ep_obj in episodes:
            # skip if season already searched
            if (s_mode or 'sponly' == search_mode) and 1 < len(episodes) \
                    and searched_scene_season == ep_obj.scene_season:
                continue

            # search cache for episode result
            cache_result = self.cache.searchCache(ep_obj, manual_search)
            if cache_result:
                if ep_obj.episode not in results:
                    results[ep_obj.episode] = cache_result
                else:
                    results[ep_obj.episode].extend(cache_result)

                # found result, search next episode
                continue

            s_mode, need_sd, need_hd, need_uhd, max_items = self.choose_search_mode(
                episodes, ep_obj, hits_per_page=self.limits)

            if 'sponly' == search_mode:
                searched_scene_season = ep_obj.scene_season

                # get season search params
                search_params = self._season_strings(ep_obj)
            else:
                # get single episode search params
                if s_mode and 1 < len(episodes):
                    searched_scene_season = ep_obj.scene_season
                    search_params = self._season_strings(ep_obj)
                else:
                    search_params = self._episode_strings(ep_obj)

            for cur_param in search_params:
                items, n_space = self._search_provider(cur_param, search_mode=search_mode, epcount=len(episodes),
                                                       need_anime=self.show.is_anime, need_sports=self.show.is_sports,
                                                       need_sd=need_sd, need_hd=need_hd, need_uhd=need_uhd,
                                                       max_items=max_items, try_all_searches=try_other_searches)
                item_list += items
                name_space.update(n_space)

        return self.finish_find_search_results(
            show, episodes, search_mode, manual_search, results, item_list, name_space=name_space)
    @staticmethod
    def _parse_pub_date(item, default=None):
        parsed_date = default
        try:
            p = item.findtext('pubDate')
            if p:
                p = parser.parse(p, fuzzy=True)
                try:
                    p = p.astimezone(sb_timezone)
                except:
                    pass
                if isinstance(p, datetime.datetime):
                    parsed_date = p.replace(tzinfo=None)
        except:
            pass

        return parsed_date
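
A standalone sketch of the same parse: dateutil accepts RSS pubDate strings, and stripping tzinfo leaves a naive datetime comparable against last_recent_search values (the import path differs outside this repo):

    from lib.dateutil import parser  # plain `dateutil` outside this codebase
    p = parser.parse('Tue, 03 May 2016 01:15:30 +0200', fuzzy=True)
    naive = p.replace(tzinfo=None)   # datetime.datetime(2016, 5, 3, 1, 15, 30)
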
    def _search_provider(self, search_params, need_anime=True, need_sports=True, need_sd=True, need_hd=True,
                         need_uhd=True, max_items=400, try_all_searches=False, **kwargs):

        api_key = self._check_auth()

        base_params = {'t': 'tvsearch',
                       'maxage': sickbeard.USENET_RETENTION or 0,
                       'limit': 100,
                       'attrs': 'rageid',
                       'limit': self.limits,
                       'attrs': ','.join([k for k, v in NewznabConstants.providerToIndexerMapping.iteritems()
                                          if v in self.caps]),
                       'offset': 0}

        if isinstance(api_key, basestring):
            base_params['apikey'] = api_key

        results = []
        results, n_spaces = [], {}
        total, cnt, search_url, exit_log = 0, len(results), '', False

        cat_sport = self.cats.get(NewznabConstants.CAT_SPORT, ['5060'])
        cat_anime = self.cats.get(NewznabConstants.CAT_ANIME, ['5070'])
        cat_hd = self.cats.get(NewznabConstants.CAT_HD, ['5040'])
        cat_sd = self.cats.get(NewznabConstants.CAT_SD, ['5030'])
        cat_uhd = self.cats.get(NewznabConstants.CAT_UHD)

        for mode in search_params.keys():
            for i, params in enumerate(search_params[mode]):

                # category ids
                cat = []
                cat_sport = ['5060']
                cat_anime = (['5070'], ['6070,7040'])['nzbs_org' == self.get_id()]
                if 'Episode' == mode or 'Season' == mode:
                    if not ('rid' in params or 'tvdbid' in params or 'q' in params or not self.supports_tvdbid()):
                        logger.log('Error no rid, tvdbid, or search term available for search.')
                    if not (any(x in params for x in [v for c, v in self.caps.iteritems()
                                                      if c not in [NewznabConstants.SEARCH_EPISODE,
                                                                   NewznabConstants.SEARCH_SEASON]]) or
                            not self.supports_tvdbid()):
                        logger.log('Error no id or search term available for search.')
                        continue

                    if self.show:
                        if self.show.is_sports:
                            cat = cat_sport
                        elif self.show.is_anime:
                            cat = cat_anime
                    else:
                        cat = cat_sport + cat_anime
                    if need_anime:
                        cat.extend(cat_anime)
                    if need_sports:
                        cat.extend(cat_sport)

                if need_hd:
                    cat.extend(cat_hd)
                if need_sd:
                    cat.extend(cat_sd)
                if need_uhd and cat_uhd is not None:
                    cat.extend(cat_uhd)

                if self.cat_ids or len(cat):
                    base_params['cat'] = ','.join(sorted(set(self.cat_ids.split(',') + cat)))
                    base_params['cat'] = ','.join(sorted(set((self.cat_ids.split(',') if self.cat_ids else []) + cat)))

                request_params = base_params.copy()
                if 'q' in params and not (any(x in params for x in ['season', 'ep'])):
                if 'Propers' == mode and 'q' in params and not (any(x in params for x in ['season', 'ep'])):
                    request_params['t'] = 'search'
                request_params.update(params)

@ -281,33 +613,54 @@ class NewznabProvider(generic.NZBProvider):

                offset = 0
                batch_count = not 0
                first_date = last_date = None

                # hardcoded to stop after a max of 4 hits (400 items) per query
                while (offset <= total) and (offset < (200, 400)[self.supports_tvdbid()]) and batch_count:
                while (offset <= total) and (offset < max_items) and batch_count:
                    cnt = len(results)

                    data = self.cache.getRSSFeed('%sapi' % self.url, params=request_params)
                    search_url = '%sapi?%s' % (self.url, urllib.urlencode(request_params))
                    i and time.sleep(2.1)

                    if not data or not self.check_auth_from_data(data):
                    data = helpers.getURL(search_url)

                    # hack this in until it's fixed server side
                    if data and not data.startswith('<?xml'):
                        data = '<?xml version="1.0" encoding="ISO-8859-1" ?>%s' % data

                    try:
                        parsed_xml, n_spaces = self.cache.parse_and_get_ns(data)
                        items = parsed_xml.findall('channel/item')
                    except Exception:
                        logger.log('Error trying to load %s RSS feed' % self.name, logger.ERROR)
                        break
                    for item in data.entries:
                    if not self.check_auth_from_data(parsed_xml):
                        break

                    if 'rss' != parsed_xml.tag:
                        logger.log('Resulting XML from %s isn\'t RSS, not parsing it' % self.name, logger.ERROR)
                        break

                    i and time.sleep(2.1)

                    for item in items:

                        title, url = self._title_and_url(item)
                        if title and url:
                            results.append(item)
                        else:
                            logger.log(u'The data returned from %s is incomplete, this result is unusable' % self.name,
                            logger.log('The data returned from %s is incomplete, this result is unusable' % self.name,
                                       logger.DEBUG)

                    # get total and offset attribs
                    # get total and offset attributes
                    try:
                        if 0 == total:
                            total = int(data.feed.newznab_response['total'] or 0)
                            hits = (total / 100 + int(0 < (total % 100)))
                            total = (helpers.tryInt(parsed_xml.find(
                                './/%sresponse' % n_spaces['newznab']).get('total', 0)), 1000)['Cache' == mode]
                            hits = (total // self.limits + int(0 < (total % self.limits)))
                            hits += int(0 == hits)
                        offset = int(data.feed.newznab_response['offset'] or 0)
                        offset = helpers.tryInt(parsed_xml.find('.//%sresponse' % n_spaces['newznab']).get('offset', 0))
                    except AttributeError:
                        break
@ -317,6 +670,12 @@ class NewznabProvider(generic.NZBProvider):

                    # Cache mode, prevent from doing another search
                    if 'Cache' == mode:
                        if items and len(items):
                            if not first_date:
                                first_date = self._parse_pub_date(items[0])
                            last_date = self._parse_pub_date(items[-1])
                        if not first_date or not last_date or not self._last_recent_search or \
                                last_date <= self.last_recent_search:
                            exit_log = True
                            break
@ -336,15 +695,79 @@ class NewznabProvider(generic.NZBProvider):
                        logger.log('%s more item%s to fetch from a batch of up to %s items.'
                                   % (items, helpers.maybe_plural(items), request_params['limit']), logger.DEBUG)

                    batch_count = self._log_result(results, mode, cnt, data.rq_response['url'])
                    batch_count = self._log_result(results, mode, cnt, search_url)

                if 'Cache' == mode and first_date:
                    self.last_recent_search = first_date

                if exit_log:
                    self._log_result(results, mode, cnt, data and data.rq_response['url'] or '%sapi' % self.url)
                    self._log_result(results, mode, cnt, search_url)
                    exit_log = False

                if 'tvdbid' in request_params and len(results):
                if not try_all_searches and any(x in request_params for x in [v for c, v in self.caps.iteritems()
                        if c not in [NewznabConstants.SEARCH_EPISODE, NewznabConstants.SEARCH_SEASON,
                                     NewznabConstants.SEARCH_TEXT]]) and len(results):
                    break

        return results, n_spaces
    def find_propers(self, search_date=None, shows=None, anime=None, **kwargs):
        cache_results = self.cache.listPropers(search_date)
        results = [classes.Proper(x['name'], x['url'], datetime.datetime.fromtimestamp(x['time']), self.show) for x in
                   cache_results]

        index = 0
        alt_search = ('nzbs_org' == self.get_id())
        do_search_alt = False

        search_terms = []
        regex = []
        if shows:
            search_terms += ['.proper.', '.repack.']
            regex += ['proper|repack']
            proper_check = re.compile(r'(?i)(\b%s\b)' % '|'.join(regex))
        if anime:
            terms = 'v1|v2|v3|v4|v5'
            search_terms += [terms]
            regex += [terms]
            proper_check = re.compile(r'(?i)(%s)' % '|'.join(regex))

        urls = []
        while index < len(search_terms):
            search_params = {'q': search_terms[index], 'maxage': sickbeard.BACKLOG_DAYS + 2}
            if alt_search:

                if do_search_alt:
                    search_params['t'] = 'search'
                    index += 1

                do_search_alt = not do_search_alt

            else:
                index += 1

            items, n_space = self._search_provider({'Propers': [search_params]})

            for item in items:

                (title, url) = self._title_and_url(item)

                if not proper_check.search(title) or url in urls:
                    continue
                urls.append(url)

                result_date = self._parse_pub_date(item)
                if not result_date:
                    logger.log(u'Unable to figure out the date for entry %s, skipping it' % title)
                    continue

                if not search_date or search_date < result_date:
                    show_obj = self.get_show(item, name_space=n_space)
                    search_result = classes.Proper(title, url, result_date, self.show, parsed_show=show_obj)
                    results.append(search_result)

            time.sleep(0.5)

        return results
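
A quick check of the proper_check pattern built above (titles are made up):

    import re
    proper_check = re.compile(r'(?i)(\b%s\b)' % '|'.join(['proper|repack']))
    assert proper_check.search('Some.Show.S01E02.PROPER.720p.HDTV.x264-GRP')
    assert not proper_check.search('Some.Show.S01E02.720p.HDTV.x264-GRP')
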
    def _log_result(self, results, mode, cnt, url):

@ -361,15 +784,31 @@ class NewznabCache(tvcache.TVCache):

        self.update_freq = 5

    def updateCache(self):
    # helper method to read the namespaces from xml
    @staticmethod
    def parse_and_get_ns(data):
        events = 'start', 'start-ns'
        root = None
        ns = {}
        for event, elem in etree.iterparse(BytesIO(data.encode('utf-8')), events):
            if 'start-ns' == event:
                ns[elem[0]] = '{%s}' % elem[1]
            elif 'start' == event:
                if None is root:
                    root = elem
        return root, ns
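
Sketch of why iterparse works here: 'start-ns' events deliver (prefix, uri) pairs before the elements that use them, so the namespace map is complete by the time the root element arrives (the feed string is hypothetical):

    import xml.etree.ElementTree as etree
    from io import BytesIO
    data = '<rss xmlns:newznab="http://www.newznab.com/DTD/2010/feeds/attributes/"><channel/></rss>'
    root, ns = None, {}
    for event, elem in etree.iterparse(BytesIO(data.encode('utf-8')), ('start', 'start-ns')):
        if 'start-ns' == event:
            ns[elem[0]] = '{%s}' % elem[1]
        elif root is None:
            root = elem
    # ns == {'newznab': '{http://www.newznab.com/DTD/2010/feeds/attributes/}'}
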
    def updateCache(self, need_anime=True, need_sports=True, need_sd=True, need_hd=True, need_uhd=True, **kwargs):

        result = []

        if 4489 != sickbeard.RECENTSEARCH_FREQUENCY or self.should_update():
            n_spaces = {}
            try:
                self._checkAuth()
                items = self.provider.cache_data()
            except Exception:
                (items, n_spaces) = self.provider.cache_data(need_anime=need_anime, need_sports=need_sports,
                                                             need_sd=need_sd, need_hd=need_hd, need_uhd=need_uhd)
            except Exception as e:
                items = None

            if items:
@ -378,7 +817,7 @@ class NewznabCache(tvcache.TVCache):
            # parse data
            cl = []
            for item in items:
                ci = self._parseItem(item)
                ci = self._parseItem(n_spaces, item)
                if ci is not None:
                    cl.append(ci)

@ -391,30 +830,33 @@ class NewznabCache(tvcache.TVCache):

        return result
    @staticmethod
    def parse_ids(item, ns):
        ids = {}
        if 'newznab' in ns:
            for attr in item.findall('%sattr' % ns['newznab']):
                if attr.get('name', '') in NewznabConstants.providerToIndexerMapping:
                    v = helpers.tryInt(attr.get('value'))
                    if v > 0:
                        ids[NewznabConstants.providerToIndexerMapping[attr.get('name')]] = v
        return ids
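
Sketch of the newznab attributes this reads (hypothetical item); ids come back keyed by indexer constant and zero values are dropped:

    import xml.etree.ElementTree as etree
    ns = {'newznab': '{http://www.newznab.com/DTD/2010/feeds/attributes/}'}
    item = etree.fromstring(
        '<item xmlns:newznab="http://www.newznab.com/DTD/2010/feeds/attributes/">'
        '<newznab:attr name="tvdbid" value="73739"/>'
        '<newznab:attr name="rageid" value="0"/></item>')
    # parse_ids(item, ns) -> {INDEXER_TVDB: 73739}; the zero rageid is skipped
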
    # override method that parses the show ids from the newznab feed
    def _parseItem(self, *item):
    def _parseItem(self, ns, item):

        title = item[0].title
        url = item[0].link
        title = item.findtext('title')
        url = item.findtext('link')

        attrs = item[0].newznab_attr
        if not isinstance(attrs, list):
            attrs = [item[0].newznab_attr]

        tvrageid = 0
        for attr in attrs:
            if 'tvrageid' == attr['name']:
                tvrageid = int(attr['value'])
                break
        ids = self.parse_ids(item, ns)

        self._checkItemAuth(title, url)

        if not title or not url:
            logger.log(u'The data returned from the %s feed is incomplete, this result is unusable'
            logger.log('The data returned from the %s feed is incomplete, this result is unusable'
                       % self.provider.name, logger.DEBUG)
            return None

        url = self._translateLinkURL(url)

        logger.log(u'Attempting to add item from RSS to cache: ' + title, logger.DEBUG)
        return self.add_cache_entry(title, url, indexer_id=tvrageid)
        logger.log('Attempting to add item from RSS to cache: %s' % title, logger.DEBUG)
        return self.add_cache_entry(title, url, id_dict=ids)
@ -133,7 +133,7 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
            return data.entries
        return []

    def _search_provider(self, search, search_mode='eponly', epcount=0, retention=0):
    def _search_provider(self, search, search_mode='eponly', epcount=0, retention=0, **kwargs):

        api_key = self._init_api()
        if False is api_key:
@ -38,7 +38,7 @@ class SceneTimeProvider(generic.TorrentProvider):
                     'params': {'sec': 'jax', 'cata': 'yes'},
                     'get': self.url_base + 'download.php/%(id)s/%(title)s.torrent'}

        self.categories = {'shows': [2, 43, 9, 63, 77, 79, 101]}
        self.categories = {'shows': [2, 43, 9, 63, 77, 79, 83]}

        self.url = self.urls['config_provider_home_uri']
@ -37,7 +37,7 @@ class ThePirateBayProvider(generic.TorrentProvider):
        generic.TorrentProvider.__init__(self, 'The Pirate Bay', cache_update_freq=20)

        self.url_home = ['https://thepiratebay.%s/' % u for u in 'se', 'org'] + \
                        ['piratebay.usbypass.xyz/']
                        ['https://piratebay.usbypass.xyz/']

        self.url_vars = {'search': 'search/%s/0/7/200', 'browse': 'tv/latest/'}
        self.url_tmpl = {'config_provider_home_uri': '%(home)s', 'search': '%(home)s%(vars)s',
@ -20,6 +20,7 @@ import datetime
import locale
import functools
import re
import time

import sickbeard
from sickbeard.network_timezones import sb_timezone
@ -200,3 +201,12 @@ class sbdatetime(datetime.datetime):
        finally:
            sbdatetime.setlocale(use_has_locale=sbdatetime.has_locale)
        return strd

    @static_or_instance
    def totimestamp(self, dt=None, default=None):
        obj = (dt, self)[self is not None]
        timestamp = default
        try:
            timestamp = time.mktime(obj.timetuple())
        finally:
            return (default, timestamp)[isinstance(timestamp, float)]
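
Sketch of the two call styles the repo-local static_or_instance decorator permits (values illustrative):

    import datetime
    when = datetime.datetime(2016, 5, 1, 12, 0, 0)
    ts = sbdatetime.totimestamp(when, default=0)           # static style, dt passed in
    ts2 = sbdatetime(2016, 5, 1, 12, 0, 0).totimestamp()   # instance style
    # both return the same epoch float; on failure the default comes back instead
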
@ -29,7 +29,6 @@ from sickbeard import name_cache
from sickbeard import logger
from sickbeard import db
from sickbeard.classes import OrderedDefaultdict
from sickbeard.indexers.indexer_api import get_xem_supported_indexers

exception_dict = {}
anidb_exception_dict = {}

@ -45,7 +44,7 @@ exceptionLock = threading.Lock()
def shouldRefresh(list):
    max_refresh_age_secs = 86400  # 1 day

    my_db = db.DBConnection('cache.db')
    my_db = db.DBConnection()
    rows = my_db.select('SELECT last_refreshed FROM scene_exceptions_refresh WHERE list = ?', [list])
    if rows:
        last_refresh = int(rows[0]['last_refreshed'])

@ -55,7 +54,7 @@ def shouldRefresh(list):


def setLastRefresh(list):
    my_db = db.DBConnection('cache.db')
    my_db = db.DBConnection()
    my_db.upsert('scene_exceptions_refresh',
                 {'last_refreshed': int(time.mktime(datetime.datetime.today().timetuple()))},
                 {'list': list})

@ -69,7 +68,7 @@ def get_scene_exceptions(indexer_id, season=-1):
    exceptions_list = []

    if indexer_id not in exceptionsCache or season not in exceptionsCache[indexer_id]:
        my_db = db.DBConnection('cache.db')
        my_db = db.DBConnection()
        exceptions = my_db.select('SELECT show_name FROM scene_exceptions WHERE indexer_id = ? and season = ?',
                                  [indexer_id, season])
        if exceptions:

@ -90,7 +89,7 @@ def get_scene_exceptions(indexer_id, season=-1):
def get_all_scene_exceptions(indexer_id):
    exceptions_dict = OrderedDefaultdict(list)

    my_db = db.DBConnection('cache.db')
    my_db = db.DBConnection()
    exceptions = my_db.select('SELECT show_name,season FROM scene_exceptions WHERE indexer_id = ? ORDER BY season', [indexer_id])

    if exceptions:

@ -108,7 +107,7 @@ def get_scene_seasons(indexer_id):
    exception_sseason_list = []

    if indexer_id not in exceptionsSeasonCache:
        my_db = db.DBConnection('cache.db')
        my_db = db.DBConnection()
        sql_results = my_db.select('SELECT DISTINCT(season) as season FROM scene_exceptions WHERE indexer_id = ?',
                                   [indexer_id])
        if sql_results:

@ -199,7 +198,7 @@ def retrieve_exceptions():
    changed_exceptions = False

    # write all the exceptions we got off the net into the database
    my_db = db.DBConnection('cache.db')
    my_db = db.DBConnection()
    cl = []
    for cur_indexer_id in exception_dict:

@ -242,7 +241,7 @@ def update_scene_exceptions(indexer_id, scene_exceptions):
    Given an indexer_id, and a list of all show scene exceptions, update the db.
    """
    global exceptionsCache
    my_db = db.DBConnection('cache.db')
    my_db = db.DBConnection()
    my_db.action('DELETE FROM scene_exceptions WHERE indexer_id=?', [indexer_id])

    # A change has been made to the scene exception list. Let's clear the cache, to make this visible

@ -348,10 +347,10 @@ def _xem_get_ids(indexer_name, xem_origin):
def get_xem_ids():
    global xem_ids_list

    for indexer in get_xem_supported_indexers().values():
        xem_ids = _xem_get_ids(indexer['name'], indexer['xem_origin'])
    for iid, name in sickbeard.indexerApi().xem_supported_indexers.iteritems():
        xem_ids = _xem_get_ids(name, sickbeard.indexerApi(iid).config['xem_origin'])
        if len(xem_ids):
            xem_ids_list[indexer['id']] = xem_ids
            xem_ids_list[iid] = xem_ids


def has_abs_episodes(ep_obj=None, name=None):
@ -109,7 +109,7 @@ def snatch_episode(result, end_status=SNATCHED):
    for cur_ep in result.episodes:
        if datetime.date.today() - cur_ep.airdate <= datetime.timedelta(days=7):
            result.priority = 1
    if None is not re.search('(^|[\. _-])(proper|repack)([\. _-]|$)', result.name, re.I):
    if None is not re.search('(^|[. _-])(proper|repack)([. _-]|$)', result.name, re.I):
        end_status = SNATCHED_PROPER

    # NZBs can be sent straight to SAB or saved to disk
|
|||
Checks if the given result is a best quality match and if we want to archive the episode on first match.
|
||||
"""
|
||||
|
||||
logger.log(u'Checking if the first best quality match should be archived for episode %s' % result.name, logger.DEBUG)
|
||||
logger.log(u'Checking if the first best quality match should be archived for episode %s' %
|
||||
result.name, logger.DEBUG)
|
||||
|
||||
show_obj = result.episodes[0].show
|
||||
|
||||
any_qualities, best_qualities = Quality.splitQuality(show_obj.quality)
|
||||
|
||||
# if there is a redownload that's a match to one of our best qualities and we want to archive the episode then we are done
|
||||
# if there is a redownload that's a match to one of our best qualities and
|
||||
# we want to archive the episode then we are done
|
||||
if best_qualities and show_obj.archive_firstmatch and result.quality in best_qualities:
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
|
||||
def wanted_episodes(show, from_date, make_dict=False):
|
||||
def wanted_episodes(show, from_date, make_dict=False, unaired=False):
|
||||
initial_qualities, archive_qualities = common.Quality.splitQuality(show.quality)
|
||||
all_qualities = list(set(initial_qualities + archive_qualities))
|
||||
|
||||
my_db = db.DBConnection()
|
||||
|
||||
if show.air_by_date:
|
||||
sql_string = 'SELECT ep.status, ep.season, ep.episode, ep.airdate FROM [tv_episodes] AS ep, [tv_shows] AS show WHERE season != 0 AND ep.showid = show.indexer_id AND show.paused = 0 AND ep.showid = ? AND show.air_by_date = 1'
|
||||
sql_string = 'SELECT ep.status, ep.season, ep.scene_season, ep.episode, ep.airdate ' + \
|
||||
'FROM [tv_episodes] AS ep, [tv_shows] AS show ' + \
|
||||
'WHERE season != 0 AND ep.showid = show.indexer_id AND show.paused = 0 ' + \
|
||||
'AND ep.showid = ? AND ep.indexer = ? AND show.air_by_date = 1'
|
||||
else:
|
||||
sql_string = 'SELECT status, season, episode, airdate FROM [tv_episodes] WHERE showid = ? AND season > 0'
|
||||
sql_string = 'SELECT status, season, scene_season, episode, airdate ' + \
|
||||
'FROM [tv_episodes] ' + \
|
||||
'WHERE showid = ? AND indexer = ? AND season > 0'
|
||||
|
||||
if sickbeard.SEARCH_UNAIRED:
|
||||
sql_results = my_db.select(sql_string, [show.indexerid, show.indexer])
|
||||
ep_count = {}
|
||||
ep_count_scene = {}
|
||||
tomorrow = (datetime.date.today() + datetime.timedelta(days=1)).toordinal()
|
||||
for result in sql_results:
|
||||
if 1 < helpers.tryInt(result['airdate']) <= tomorrow:
|
||||
cur_season = helpers.tryInt(result['season'])
|
||||
ep_count[cur_season] = ep_count.setdefault(cur_season, 0) + 1
|
||||
cur_scene_season = helpers.tryInt(result['scene_season'], -1)
|
||||
if -1 != cur_scene_season:
|
||||
                ep_count_scene[cur_scene_season] = ep_count_scene.setdefault(cur_scene_season, 0) + 1

    if unaired:
        status_list = [common.WANTED, common.FAILED, common.UNAIRED]
        sql_string += ' AND ( airdate > ? OR airdate = 1 )'
    else:
        status_list = [common.WANTED, common.FAILED]
        sql_string += ' AND airdate > ?'

    sql_results = my_db.select(sql_string, [show.indexerid, from_date.toordinal()])
    sql_results = my_db.select(sql_string, [show.indexerid, show.indexer, from_date.toordinal()])

    # check through the list of statuses to see if we want any
    if make_dict:
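
Sketch of the airdate tally above (fake rows): only episodes with a real airdate up to tomorrow count toward eps_aired_in_season, since an airdate ordinal of 1 means 'no airdate':

    import datetime
    tomorrow = (datetime.date.today() + datetime.timedelta(days=1)).toordinal()
    ep_count = {}
    for season, airdate_ordinal in [(1, tomorrow - 10), (1, tomorrow - 3), (2, 1)]:
        if 1 < airdate_ordinal <= tomorrow:
            ep_count[season] = ep_count.setdefault(season, 0) + 1
    # ep_count == {1: 2}; the season 2 row is skipped
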
@ -367,11 +386,14 @@ def wanted_episodes(show, from_date, make_dict=False):
            not_downloaded = False

        ep_obj = show.getEpisode(int(result['season']), int(result['episode']))
        if make_dict:
            wanted.setdefault(ep_obj.season, []).append(ep_obj)
        else:
        ep_obj.wantedQuality = [i for i in (initial_qualities if not_downloaded else
                                            wanted_qualities) if (i > cur_quality and i != common.Quality.UNKNOWN)]
        ep_obj.eps_aired_in_season = ep_count.get(helpers.tryInt(result['season']), 0)
        ep_obj.eps_aired_in_scene_season = ep_count_scene.get(
            helpers.tryInt(result['scene_season']), 0) if result['scene_season'] else None
        if make_dict:
            wanted.setdefault(ep_obj.scene_season if ep_obj.show.is_scene else ep_obj.season, []).append(ep_obj)
        else:
            wanted.append(ep_obj)

    if 0 < total_wanted + total_replacing + total_unaired:
@ -406,8 +428,8 @@ def search_for_needed_episodes(episodes):
    for cur_ep in cur_found_results:

        if cur_ep.show.paused:
            logger.log(u'Show %s is paused, ignoring all RSS items for %s' % (cur_ep.show.name, cur_ep.prettyName()),
                       logger.DEBUG)
            logger.log(u'Show %s is paused, ignoring all RSS items for %s' %
                       (cur_ep.show.name, cur_ep.prettyName()), logger.DEBUG)
            continue

        # find the best result for the current episode

@ -443,7 +465,7 @@ def search_for_needed_episodes(episodes):
    return found_results.values()


def search_providers(show, episodes, manual_search=False):
def search_providers(show, episodes, manual_search=False, torrent_only=False, try_other_searches=False):
    found_results = {}
    final_results = []

@ -451,7 +473,8 @@ def search_providers(show, episodes, manual_search=False):

    orig_thread_name = threading.currentThread().name

    provider_list = [x for x in sickbeard.providers.sortedProviderList() if x.is_active() and x.enable_backlog]
    provider_list = [x for x in sickbeard.providers.sortedProviderList() if x.is_active() and x.enable_backlog and
                     (not torrent_only or x.providerType == GenericProvider.TORRENT)]
    for cur_provider in provider_list:
        if cur_provider.anime_only and not show.is_anime:
            logger.log(u'%s is not an anime, skipping' % show.name, logger.DEBUG)

@ -475,11 +498,12 @@ def search_providers(show, episodes, manual_search=False):

        try:
            cur_provider.cache._clearCache()
            search_results = cur_provider.find_search_results(show, episodes, search_mode, manual_search)
            search_results = cur_provider.find_search_results(show, episodes, search_mode, manual_search,
                                                              try_other_searches=try_other_searches)
            if any(search_results):
                logger.log(', '.join(['%s%s has %s candidate%s' % (
                    ('S', 'Ep')['ep' in search_mode], k, len(v), helpers.maybe_plural(len(v)))
                    for (k, v) in search_results.iteritems()]))
                logger.log(', '.join(['%s %s candidate%s' % (
                    len(v), (('multiep', 'season')[SEASON_RESULT == k], 'episode')['ep' in search_mode],
                    helpers.maybe_plural(len(v))) for (k, v) in search_results.iteritems()]))
        except exceptions.AuthException as e:
            logger.log(u'Authentication error: %s' % ex(e), logger.ERROR)
            break

@ -497,8 +521,8 @@ def search_providers(show, episodes, manual_search=False):
        for cur_ep in search_results:
            # skip non-tv crap
            search_results[cur_ep] = filter(
                lambda item: show_name_helpers.pass_wordlist_checks(item.name, parse=False) and
                             item.show == show, search_results[cur_ep])
                lambda ep_item: show_name_helpers.pass_wordlist_checks(
                    ep_item.name, parse=False) and ep_item.show == show, search_results[cur_ep])

            if cur_ep in found_results:
                found_results[provider_id][cur_ep] += search_results[cur_ep]

@ -556,7 +580,8 @@ def search_providers(show, episodes, manual_search=False):
        else:
            any_wanted = True

    # if we need every ep in the season and there's nothing better then just download this and be done with it (unless single episodes are preferred)
    # if we need every ep in the season and there's nothing better then just download this and
    # be done with it (unless single episodes are preferred)
    if all_wanted and highest_quality_overall == best_season_result.quality:
        logger.log(u'Every episode in this season is needed, downloading the whole %s %s' %
                   (best_season_result.provider.providerType, best_season_result.name))

@ -579,7 +604,8 @@ def search_providers(show, episodes, manual_search=False):
        individual_results = nzbSplitter.splitResult(best_season_result)

        individual_results = filter(
            lambda r: show_name_helpers.pass_wordlist_checks(r.name, parse=False) and r.show == show, individual_results)
            lambda r: show_name_helpers.pass_wordlist_checks(
                r.name, parse=False) and r.show == show, individual_results)

        for cur_result in individual_results:
            if 1 == len(cur_result.episodes):

@ -592,10 +618,11 @@ def search_providers(show, episodes, manual_search=False):
        else:
            found_results[provider_id][ep_num] = [cur_result]

    # If this is a torrent all we can do is leech the entire torrent, user will have to select which eps not do download in his torrent client
    # If this is a torrent all we can do is leech the entire torrent,
    # user will have to select which eps not to download in his torrent client
    else:

        # Season result from Torrent Provider must be a full-season torrent, creating multi-ep result for it.
        # Season result from Torrent Provider must be a full-season torrent, creating multi-ep result for it
        logger.log(u'Adding multi episode result for full season torrent. In your torrent client, set ' +
                   u'the episodes that you do not want to "don\'t download"')
        ep_objs = []

@ -637,7 +664,8 @@ def search_providers(show, episodes, manual_search=False):
                   (needed_eps, not_needed_eps), logger.DEBUG)

        if not not_needed_eps:
            logger.log(u'All of these episodes were covered by single episode results, ignoring this multi episode result', logger.DEBUG)
            logger.log(u'All of these episodes were covered by single episode results, ' +
                       'ignoring this multi episode result', logger.DEBUG)
            continue

        # check if these eps are already covered by another multi-result

@ -650,11 +678,12 @@ def search_providers(show, episodes, manual_search=False):
        else:
            multi_needed_eps.append(ep_num)

        logger.log(u'Multi episode check result is... multi needed episodes: %s, multi not needed episodes: %s' %
                   (multi_needed_eps, multi_not_needed_eps), logger.DEBUG)
        logger.log(u'Multi episode check result is... multi needed episodes: ' +
                   '%s, multi not needed episodes: %s' % (multi_needed_eps, multi_not_needed_eps), logger.DEBUG)

        if not multi_needed_eps:
            logger.log(u'All of these episodes were covered by another multi episode nzb, ignoring this multi episode result',
            logger.log(u'All of these episodes were covered by another multi episode nzb, ' +
                       'ignoring this multi episode result',
                       logger.DEBUG)
            continue

@ -666,8 +695,8 @@ def search_providers(show, episodes, manual_search=False):
        for ep_obj in multi_result.episodes:
            ep_num = ep_obj.episode
            if ep_num in found_results[provider_id]:
                logger.log(u'A needed multi episode result overlaps with a single episode result for episode #%s, removing the single episode results from the list' %
                           ep_num, logger.DEBUG)
                logger.log(u'A needed multi episode result overlaps with a single episode result for episode ' +
                           '#%s, removing the single episode results from the list' % ep_num, logger.DEBUG)
                del found_results[provider_id][ep_num]

    # of all the single ep results narrow it down to the best one for each episode
@ -27,45 +27,78 @@ from sickbeard import db, scheduler, helpers
from sickbeard import search_queue
from sickbeard import logger
from sickbeard import ui
from sickbeard import common
from sickbeard.providers.generic import GenericProvider
from sickbeard.search import wanted_episodes
from sickbeard.helpers import find_show_by_id
from sickbeard.sbdatetime import sbdatetime

NORMAL_BACKLOG = 0
LIMITED_BACKLOG = 10
FULL_BACKLOG = 20
FORCED_BACKLOG = 30


class BacklogSearchScheduler(scheduler.Scheduler):
    def forceSearch(self, force_type=NORMAL_BACKLOG):
        self.force = True
    def force_search(self, force_type=NORMAL_BACKLOG):
        self.action.forcetype = force_type
        self.action.force = True
        self.force = True

    def nextRun(self):
        if self.action._lastBacklog <= 1:
    def next_run(self):
        if 1 >= self.action._lastBacklog:
            return datetime.date.today()
        elif (self.action._lastBacklog + self.action.cycleTime) < datetime.date.today().toordinal():
            return datetime.date.today()
        else:
            return datetime.date.fromordinal(self.action._lastBacklog + self.action.cycleTime)

    def next_backlog_timeleft(self):
        now = datetime.datetime.now()
        torrent_enabled = 0 < len([x for x in sickbeard.providers.sortedProviderList() if x.is_active() and
                                   x.enable_backlog and x.providerType == GenericProvider.TORRENT])
        if now > self.action.nextBacklog or self.action.nextCyleTime != self.cycleTime:
            nextruntime = now + self.timeLeft()
            if not torrent_enabled:
                nextpossibleruntime = (datetime.datetime.fromtimestamp(self.action.last_runtime) +
                                       datetime.timedelta(hours=23))
                for _ in xrange(5):
                    if nextruntime > nextpossibleruntime:
                        self.action.nextBacklog = nextruntime
                        self.action.nextCyleTime = self.cycleTime
                        break
                    nextruntime += self.cycleTime
            else:
                self.action.nextCyleTime = self.cycleTime
                self.action.nextBacklog = nextruntime
        return self.action.nextBacklog - now if self.action.nextBacklog > now else datetime.timedelta(seconds=0)


class BacklogSearcher:
    def __init__(self):

        self._lastBacklog = self._get_lastBacklog()
        self._lastBacklog = self._get_last_backlog()
        self.cycleTime = sickbeard.BACKLOG_FREQUENCY
        self.lock = threading.Lock()
        self.amActive = False
        self.amPaused = False
        self.amWaiting = False
        self.forcetype = NORMAL_BACKLOG
        self.force = False
        self.nextBacklog = datetime.datetime.fromtimestamp(1)
        self.nextCyleTime = None
        self.currentSearchInfo = None

        self._resetPI()
        self._reset_progress_indicator()

    def _resetPI(self):
    @property
    def last_runtime(self):
        return self._get_last_runtime()

    def _reset_progress_indicator(self):
        self.percentDone = 0
        self.currentSearchInfo = {'title': 'Initializing'}

    def getProgressIndicator(self):
    def get_progress_indicator(self):
        if self.amActive:
            return ui.ProgressIndicator(self.percentDone, self.currentSearchInfo)
        else:

@ -75,7 +108,18 @@ class BacklogSearcher:
        logger.log(u'amWaiting: ' + str(self.amWaiting) + ', amActive: ' + str(self.amActive), logger.DEBUG)
        return (not self.amWaiting) and self.amActive

    def search_backlog(self, which_shows=None, force_type=NORMAL_BACKLOG):
    def add_backlog_item(self, items, standard_backlog, limited_backlog, forced, torrent_only):
        for segments in items:
            if len(segments):
                for season, segment in segments.items():
                    self.currentSearchInfo = {'title': segment[0].show.name + ' Season ' + str(season)}

                    backlog_queue_item = search_queue.BacklogQueueItem(
                        segment[0].show, segment, standard_backlog=standard_backlog, limited_backlog=limited_backlog,
                        forced=forced, torrent_only=torrent_only)
                    sickbeard.searchQueueScheduler.action.add_item(backlog_queue_item)

    def search_backlog(self, which_shows=None, force_type=NORMAL_BACKLOG, force=False):

        if self.amActive:
            logger.log(u'Backlog is still running, not starting it again', logger.DEBUG)
@ -88,85 +132,196 @@ class BacklogSearcher:
            show_list = sickbeard.showList
            standard_backlog = True

        self._get_lastBacklog()
        now = datetime.datetime.now()
        torrent_only = continued_backlog = False
        if not force and standard_backlog and (datetime.datetime.now() - datetime.datetime.fromtimestamp(
                self._get_last_runtime())) < datetime.timedelta(hours=23):
            if [x for x in sickbeard.providers.sortedProviderList() if x.is_active() and x.enable_backlog and
                    x.providerType == GenericProvider.TORRENT]:
                torrent_only = True
            else:
                logger.log('Last scheduled Backlog run was within the last day, skipping this run.', logger.DEBUG)
                return

        curDate = datetime.date.today().toordinal()
        fromDate = datetime.date.fromordinal(1)
        self._get_last_backlog()
        self.amActive = True
        self.amPaused = False

        cur_date = datetime.date.today().toordinal()
        from_date = datetime.date.fromordinal(1)
        limited_from_date = datetime.date.today() - datetime.timedelta(days=sickbeard.BACKLOG_DAYS)

        limited_backlog = False
        if (not which_shows and force_type == LIMITED_BACKLOG) or (not which_shows and force_type != FULL_BACKLOG and not curDate - self._lastBacklog >= self.cycleTime):
            logger.log(u'Running limited backlog for episodes missed during the last %s day(s)' % str(sickbeard.BACKLOG_DAYS))
            fromDate = datetime.date.today() - datetime.timedelta(days=sickbeard.BACKLOG_DAYS)
        if not which_shows and torrent_only:
            logger.log(u'Running limited backlog for episodes missed during the last %s day(s)' %
                       str(sickbeard.BACKLOG_DAYS))
            from_date = limited_from_date
            limited_backlog = True

        runparts = []
        if standard_backlog and not torrent_only:
            my_db = db.DBConnection('cache.db')
            sql_result = my_db.select('SELECT * FROM backlogparts WHERE part in (SELECT MIN(part) FROM backlogparts)')
            if sql_result:
                sl = []
                part_nr = int(sql_result[0]['part'])
                for s in sql_result:
                    show_obj = find_show_by_id(sickbeard.showList, {int(s['indexer']): int(s['indexerid'])})
                    if show_obj:
                        sl.append(show_obj)
                        runparts.append([int(s['indexerid']), int(s['indexer'])])
                show_list = sl
                continued_backlog = True
                my_db.action('DELETE FROM backlogparts WHERE part = ?', [part_nr])

        forced = False
        if not which_shows and force_type != NORMAL_BACKLOG:
            forced = True

        self.amActive = True
        self.amPaused = False

        # go through non air-by-date shows and see if they need any episodes
        wanted_list = []
        for curShow in show_list:
            if not curShow.paused:
                w = wanted_episodes(curShow, from_date, make_dict=True,
                                    unaired=(sickbeard.SEARCH_UNAIRED and not sickbeard.UNAIRED_RECENT_SEARCH_ONLY))
                if w:
                    wanted_list.append(w)

            if curShow.paused:
        parts = []
        if standard_backlog and not torrent_only and not continued_backlog:
            fullbacklogparts = sum([len(w) for w in wanted_list if w]) / sickbeard.BACKLOG_FREQUENCY
            h_part = []
            counter = 0
            for w in wanted_list:
                f = False
                for season, segment in w.iteritems():
                    counter += 1
                    if not f:
                        h_part.append([segment[0].show.indexerid, segment[0].show.indexer])
                        f = True
                if counter > fullbacklogparts:
                    counter = 0
                    parts.append(h_part)
                    h_part = []

            if h_part:
                parts.append(h_part)
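
Illustrative split (hypothetical numbers): 40 shows with one wanted season-segment each and a BACKLOG_FREQUENCY of 21 days:

    wanted_counts = [1] * 40
    fullbacklogparts = sum(wanted_counts) / 21   # -> 1 on py2 integer division
    # the loop above starts a new part whenever `counter` exceeds that threshold,
    # giving roughly 20 parts of two shows each, persisted to the backlogparts
    # table and consumed one part per daily run
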
def in_showlist(show, showlist):
|
||||
return 0 < len([item for item in showlist if item[1] == show.indexer and item[0] == show.indexerid])
|
||||
|
||||
if not runparts and parts:
|
||||
runparts = parts[0]
|
||||
wanted_list = [w for w in wanted_list if w and in_showlist(w.itervalues().next()[0].show, runparts)]
|
||||
|
||||
limited_wanted_list = []
|
||||
if standard_backlog and not torrent_only and runparts:
|
||||
for curShow in sickbeard.showList:
|
||||
if not curShow.paused and not in_showlist(curShow, runparts):
|
||||
w = wanted_episodes(curShow, limited_from_date, make_dict=True,
|
||||
unaired=(sickbeard.SEARCH_UNAIRED and not sickbeard.UNAIRED_RECENT_SEARCH_ONLY))
|
||||
if w:
|
||||
limited_wanted_list.append(w)
|
||||
|
||||
self.add_backlog_item(wanted_list, standard_backlog, limited_backlog, forced, torrent_only)
|
||||
if standard_backlog and not torrent_only and limited_wanted_list:
|
||||
self.add_backlog_item(limited_wanted_list, standard_backlog, True, forced, torrent_only)
|
||||
|
||||
if standard_backlog and not torrent_only and not continued_backlog:
|
||||
cl = ([], [['DELETE FROM backlogparts']])[len(parts) > 1]
|
||||
for i, l in enumerate(parts):
|
||||
if 0 == i:
|
||||
continue
|
||||
for m in l:
|
||||
cl.append(['INSERT INTO backlogparts (part, indexerid, indexer) VALUES (?,?,?)',
|
||||
[i + 1, m[0], m[1]]])
|
||||
|
||||
                segments = wanted_episodes(curShow, fromDate, make_dict=True)

                for season, segment in segments.items():
                    self.currentSearchInfo = {'title': curShow.name + ' Season ' + str(season)}

                    backlog_queue_item = search_queue.BacklogQueueItem(curShow, segment, standard_backlog=standard_backlog, limited_backlog=limited_backlog, forced=forced)
                    sickbeard.searchQueueScheduler.action.add_item(backlog_queue_item)  # @UndefinedVariable
                else:
                    logger.log(u'Nothing needs to be downloaded for %s, skipping' % str(curShow.name), logger.DEBUG)

        if 0 < len(cl):
            my_db.mass_action(cl)

        # don't consider this an actual backlog search if we only did recent eps
        # or if we only did certain shows
        if fromDate == datetime.date.fromordinal(1) and not which_shows:
            self._set_lastBacklog(curDate)
            self._get_lastBacklog()
        if from_date == datetime.date.fromordinal(1) and not which_shows:
            self._set_last_backlog(cur_date)
            self._get_last_backlog()

        if standard_backlog and not torrent_only:
            self._set_last_runtime(now)

        self.amActive = False
        self._resetPI()
        self._reset_progress_indicator()

    def _get_lastBacklog(self):
    @staticmethod
    def _get_last_runtime():
        logger.log('Retrieving the last runtime of Backlog from the DB', logger.DEBUG)

        logger.log(u'Retrieving the last check time from the DB', logger.DEBUG)
        my_db = db.DBConnection()
        sql_results = my_db.select('SELECT * FROM info')

        myDB = db.DBConnection()
        sqlResults = myDB.select('SELECT * FROM info')

        if len(sqlResults) == 0:
            lastBacklog = 1
        elif sqlResults[0]['last_backlog'] == None or sqlResults[0]['last_backlog'] == '':
            lastBacklog = 1
        if 0 == len(sql_results):
            last_run_time = 1
        elif None is sql_results[0]['last_run_backlog'] or '' == sql_results[0]['last_run_backlog']:
            last_run_time = 1
        else:
            lastBacklog = int(sqlResults[0]['last_backlog'])
            if lastBacklog > datetime.date.today().toordinal():
                lastBacklog = 1
            last_run_time = int(sql_results[0]['last_run_backlog'])
            if last_run_time > sbdatetime.now().totimestamp(default=0):
                last_run_time = 1

        self._lastBacklog = lastBacklog
        return last_run_time

    def _set_last_runtime(self, when):
        logger.log('Setting the last backlog runtime in the DB to %s' % when, logger.DEBUG)

        my_db = db.DBConnection()
        sql_results = my_db.select('SELECT * FROM info')

        if len(sql_results) == 0:
            my_db.action('INSERT INTO info (last_backlog, last_indexer, last_run_backlog) VALUES (?,?,?)',
                         [1, 0, sbdatetime.totimestamp(when, default=0)])
        else:
            my_db.action('UPDATE info SET last_run_backlog=%s' % sbdatetime.totimestamp(when, default=0))

        self.nextBacklog = datetime.datetime.fromtimestamp(1)

    def _get_last_backlog(self):

        logger.log('Retrieving the last check time from the DB', logger.DEBUG)

        my_db = db.DBConnection()
        sql_results = my_db.select('SELECT * FROM info')

        if 0 == len(sql_results):
            last_backlog = 1
        elif None is sql_results[0]['last_backlog'] or '' == sql_results[0]['last_backlog']:
            last_backlog = 1
        else:
            last_backlog = int(sql_results[0]['last_backlog'])
            if last_backlog > datetime.date.today().toordinal():
                last_backlog = 1

        self._lastBacklog = last_backlog
        return self._lastBacklog

    def _set_lastBacklog(self, when):
    @staticmethod
    def _set_last_backlog(when):

        logger.log(u'Setting the last backlog in the DB to ' + str(when), logger.DEBUG)
        logger.log('Setting the last backlog in the DB to %s' % when, logger.DEBUG)

        myDB = db.DBConnection()
        sqlResults = myDB.select('SELECT * FROM info')
        my_db = db.DBConnection()
        sql_results = my_db.select('SELECT * FROM info')

        if len(sqlResults) == 0:
            myDB.action('INSERT INTO info (last_backlog, last_indexer) VALUES (?,?)', [str(when), 0])
        if len(sql_results) == 0:
            my_db.action('INSERT INTO info (last_backlog, last_indexer, last_run_backlog) VALUES (?,?,?)',
                         [str(when), 0, 1])
        else:
            myDB.action('UPDATE info SET last_backlog=' + str(when))
            my_db.action('UPDATE info SET last_backlog=%s' % when)

    def run(self):
        try:
            force_type = self.forcetype
            force = self.force
            self.forcetype = NORMAL_BACKLOG
            self.search_backlog(force_type=force_type)
            self.force = False
            self.search_backlog(force_type=force_type, force=force)
        except:
            self.amActive = False
            raise
@ -26,6 +26,7 @@ import sickbeard

from sickbeard import db, logger, common, exceptions, helpers, network_timezones, generic_queue, search, \
    failed_history, history, ui, properFinder
from sickbeard.search import wanted_episodes
from sickbeard.common import Quality


search_queue_lock = threading.Lock()

@ -70,7 +71,8 @@ class SearchQueue(generic_queue.GenericQueue):

        with self.lock:
            ep_obj_list = []
            for cur_item in self.queue:
                if isinstance(cur_item, (ManualSearchQueueItem, FailedQueueItem)) and str(cur_item.show.indexerid) == show:
                if (isinstance(cur_item, (ManualSearchQueueItem, FailedQueueItem)) and
                        show == str(cur_item.show.indexerid)):
                    ep_obj_list.append(cur_item)

            if ep_obj_list:

@ -146,13 +148,19 @@ class SearchQueue(generic_queue.GenericQueue):

            if isinstance(cur_item, RecentSearchQueueItem):
                length['recent'] += 1
            elif isinstance(cur_item, BacklogQueueItem):
                length['backlog'].append([cur_item.show.indexerid, cur_item.show.name, cur_item.segment, cur_item.standard_backlog, cur_item.limited_backlog, cur_item.forced])
                length['backlog'].append({'indexerid': cur_item.show.indexerid, 'indexer': cur_item.show.indexer,
                                          'name': cur_item.show.name, 'segment': cur_item.segment,
                                          'standard_backlog': cur_item.standard_backlog,
                                          'limited_backlog': cur_item.limited_backlog, 'forced': cur_item.forced,
                                          'torrent_only': cur_item.torrent_only})
            elif isinstance(cur_item, ProperSearchQueueItem):
                length['proper'] += 1
            elif isinstance(cur_item, ManualSearchQueueItem):
                length['manual'].append([cur_item.show.indexerid, cur_item.show.name, cur_item.segment])
                length['manual'].append({'indexerid': cur_item.show.indexerid, 'indexer': cur_item.show.indexer,
                                         'name': cur_item.show.name, 'segment': cur_item.segment})
            elif isinstance(cur_item, FailedQueueItem):
                length['failed'].append([cur_item.show.indexerid, cur_item.show.name, cur_item.segment])
                length['failed'].append({'indexerid': cur_item.show.indexerid, 'indexer': cur_item.show.indexer,
                                         'name': cur_item.show.name, 'segment': cur_item.segment})
            return length
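Queue items are now reported as dicts rather than positional lists, so consumers can address fields by name. A minimal sketch of a consumer, with an invented queue snapshot standing in for the real return value:

# Sketch: render the dict-shaped queue info; 'queue_length' is an invented
# snapshot of the structure built above.
queue_length = {'recent': 1, 'backlog': [
    {'indexerid': 101, 'indexer': 1, 'name': 'Example Show', 'segment': {1: []},
     'standard_backlog': True, 'limited_backlog': False, 'forced': False, 'torrent_only': False}]}

for item in queue_length['backlog']:
    flags = [k for k in ('standard_backlog', 'limited_backlog', 'forced', 'torrent_only') if item[k]]
    print('%s (%s:%s) %s' % (item['name'], item['indexer'], item['indexerid'], ', '.join(flags) or 'normal'))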
    def add_item(self, item):

@ -181,15 +189,36 @@ class RecentSearchQueueItem(generic_queue.QueueItem):

        try:
            self._change_missing_episodes()

            self.update_providers()

            show_list = sickbeard.showList
            from_date = datetime.date.fromordinal(1)
            need_anime = need_sports = need_sd = need_hd = need_uhd = False
            max_sd = Quality.SDDVD
            hd_qualities = [Quality.HDTV, Quality.FULLHDTV, Quality.HDWEBDL, Quality.FULLHDWEBDL,
                            Quality.HDBLURAY, Quality.FULLHDBLURAY]
            max_hd = Quality.FULLHDBLURAY
            for curShow in show_list:
                if curShow.paused:
                    continue

                self.episodes.extend(wanted_episodes(curShow, from_date))
                wanted_eps = wanted_episodes(curShow, from_date, unaired=sickbeard.SEARCH_UNAIRED)
                if wanted_eps:
                    if not need_anime and curShow.is_anime:
                        need_anime = True
                    if not need_sports and curShow.is_sports:
                        need_sports = True
                    if not need_sd or not need_hd:
                        for w in wanted_eps:
                            if not w.show.is_anime and not w.show.is_sports:
                                if not need_sd and max_sd >= min(w.wantedQuality):
                                    need_sd = True
                                if not need_hd and any(i in hd_qualities for i in w.wantedQuality):
                                    need_hd = True
                                if not need_uhd and max_hd < max(w.wantedQuality):
                                    need_uhd = True
                self.episodes.extend(wanted_eps)

            self.update_providers(need_anime=need_anime, need_sports=need_sports,
                                  need_sd=need_sd, need_hd=need_hd, need_uhd=need_uhd)

            if not self.episodes:
                logger.log(u'No search of cache for episodes required')
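A standalone sketch of the quality-need detection above, with small invented integers standing in for the real sickbeard Quality constants:

# Sketch of the need_sd/need_hd/need_uhd detection; values are invented.
SDDVD, FULLHDBLURAY = 4, 256          # stand-in quality values
hd_qualities = [8, 16, 32, 64, 128, 256]

need_sd = need_hd = need_uhd = False
for wanted_quality in ([4, 8], [32, 256], [1024]):  # one list per wanted episode
    need_sd |= SDDVD >= min(wanted_quality)
    need_hd |= any(q in hd_qualities for q in wanted_quality)
    need_uhd |= FULLHDBLURAY < max(wanted_quality)
print(need_sd, need_hd, need_uhd)  # -> True True True

Providers whose categories match none of the needed qualities can then be skipped entirely, which is where the saved api hits come from.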
@ -257,7 +286,8 @@ class RecentSearchQueueItem(generic_queue.QueueItem):

                continue

            try:
                end_time = network_timezones.parse_date_time(sqlEp['airdate'], show.airs, show.network) + datetime.timedelta(minutes=helpers.tryInt(show.runtime, 60))
                end_time = (network_timezones.parse_date_time(sqlEp['airdate'], show.airs, show.network) +
                            datetime.timedelta(minutes=helpers.tryInt(show.runtime, 60)))
                # filter out any episodes that haven't aired yet
                if end_time > cur_time:
                    continue

@ -283,7 +313,7 @@ class RecentSearchQueueItem(generic_queue.QueueItem):

        logger.log(u'Found new episodes marked wanted')

    @staticmethod
    def update_providers():
    def update_providers(need_anime=True, need_sports=True, need_sd=True, need_hd=True, need_uhd=True):
        orig_thread_name = threading.currentThread().name
        threads = []

@ -297,6 +327,8 @@ class RecentSearchQueueItem(generic_queue.QueueItem):

            # spawn a thread for each provider to save time waiting for slow response providers
            threads.append(threading.Thread(target=cur_provider.cache.updateCache,
                                            kwargs={'need_anime': need_anime, 'need_sports': need_sports,
                                                    'need_sd': need_sd, 'need_hd': need_hd, 'need_uhd': need_uhd},
                                            name='%s :: [%s]' % (orig_thread_name, cur_provider.name)))
            # start the thread we just created
            threads[-1].start()
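A standalone sketch of the fan-out pattern above, one thread per provider so a single slow provider does not serialise the rest; provider names and delays are invented:

# Sketch: thread-per-provider cache update, then join before using results.
import threading
import time

def update_cache(name, delay, **search_needs):
    time.sleep(delay)  # stands in for a provider HTTP round trip
    print('%s done (needs: %s)' % (name, search_needs))

threads = []
for name, delay in (('NewznabA', 0.2), ('NewznabB', 0.1)):
    threads.append(threading.Thread(target=update_cache, args=(name, delay),
                                    kwargs={'need_sd': True, 'need_hd': False}))
    threads[-1].start()
for t in threads:  # wait for all providers before searching their caches
    t.join()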
@ -344,7 +376,7 @@ class ManualSearchQueueItem(generic_queue.QueueItem):

            logger.log(u'Beginning manual search for: [%s]' % self.segment.prettyName())
            self.started = True

            search_result = search.search_providers(self.show, [self.segment], True)
            search_result = search.search_providers(self.show, [self.segment], True, try_other_searches=True)

            if search_result:
                # just use the first result for now

@ -373,7 +405,7 @@ class ManualSearchQueueItem(generic_queue.QueueItem):


class BacklogQueueItem(generic_queue.QueueItem):
    def __init__(self, show, segment, standard_backlog=False, limited_backlog=False, forced=False):
    def __init__(self, show, segment, standard_backlog=False, limited_backlog=False, forced=False, torrent_only=False):
        generic_queue.QueueItem.__init__(self, 'Backlog', BACKLOG_SEARCH)
        self.priority = generic_queue.QueuePriorities.LOW
        self.name = 'BACKLOG-%s' % show.indexerid

@ -383,13 +415,16 @@ class BacklogQueueItem(generic_queue.QueueItem):

        self.standard_backlog = standard_backlog
        self.limited_backlog = limited_backlog
        self.forced = forced
        self.torrent_only = torrent_only

    def run(self):
        generic_queue.QueueItem.run(self)

        try:
            logger.log(u'Beginning backlog search for: [%s]' % self.show.name)
            search_result = search.search_providers(self.show, self.segment, False)
            search_result = search.search_providers(
                self.show, self.segment, False,
                try_other_searches=(not self.standard_backlog or not self.limited_backlog))

            if search_result:
                for result in search_result:

@ -436,7 +471,7 @@ class FailedQueueItem(generic_queue.QueueItem):

            failed_history.revertEpisode(epObj)
            logger.log(u'Beginning failed download search for: [%s]' % epObj.prettyName())

            search_result = search.search_providers(self.show, self.segment, True)
            search_result = search.search_providers(self.show, self.segment, True, try_other_searches=True)

            if search_result:
                for result in search_result:
@ -88,25 +88,31 @@ class ShowQueue(generic_queue.GenericQueue):

        with self.lock:
            for cur_item in [self.currentItem] + self.queue:
                if isinstance(cur_item, QueueItemAdd):
                    length['add'].append([cur_item.show_name, cur_item.scheduled_update])
                    length['add'].append({'name': cur_item.show_name, 'scheduled_update': cur_item.scheduled_update})
                elif isinstance(cur_item, QueueItemUpdate):
                    update_type = 'Normal'
                    if isinstance(cur_item, QueueItemForceUpdate):
                        update_type = 'Forced'
                    elif isinstance(cur_item, QueueItemForceUpdateWeb):
                        update_type = 'Forced Web'
                    length['update'].append([cur_item.show_name, cur_item.scheduled_update, update_type])
                    length['update'].append({'name': cur_item.show_name, 'indexerid': cur_item.show.indexerid,
                                             'indexer': cur_item.show.indexer, 'scheduled_update': cur_item.scheduled_update,
                                             'update_type': update_type})
                elif isinstance(cur_item, QueueItemRefresh):
                    length['refresh'].append([cur_item.show_name, cur_item.scheduled_update])
                    length['refresh'].append({'name': cur_item.show_name, 'indexerid': cur_item.show.indexerid,
                                              'indexer': cur_item.show.indexer, 'scheduled_update': cur_item.scheduled_update})
                elif isinstance(cur_item, QueueItemRename):
                    length['rename'].append([cur_item.show_name, cur_item.scheduled_update])
                    length['rename'].append({'name': cur_item.show_name, 'indexerid': cur_item.show.indexerid,
                                             'indexer': cur_item.show.indexer, 'scheduled_update': cur_item.scheduled_update})
                elif isinstance(cur_item, QueueItemSubtitle):
                    length['subtitle'].append([cur_item.show_name, cur_item.scheduled_update])
                    length['subtitle'].append({'name': cur_item.show_name, 'indexerid': cur_item.show.indexerid,
                                               'indexer': cur_item.show.indexer, 'scheduled_update': cur_item.scheduled_update})
            return length

    loadingShowList = property(_getLoadingShowList)

    def updateShow(self, show, force=False, web=False, scheduled_update=False):
    def updateShow(self, show, force=False, web=False, scheduled_update=False,
                   priority=generic_queue.QueuePriorities.NORMAL, **kwargs):

        if self.isBeingAdded(show):
            raise exceptions.CantUpdateException(

@ -121,17 +127,18 @@ class ShowQueue(generic_queue.GenericQueue):

                'This show is already being updated, can\'t update again until it\'s done.')

        if not force:
            queueItemObj = QueueItemUpdate(show, scheduled_update=scheduled_update)
            queueItemObj = QueueItemUpdate(show, scheduled_update=scheduled_update, **kwargs)
        elif web:
            queueItemObj = QueueItemForceUpdateWeb(show, scheduled_update=scheduled_update)
            queueItemObj = QueueItemForceUpdateWeb(show, scheduled_update=scheduled_update, priority=priority, **kwargs)
        else:
            queueItemObj = QueueItemForceUpdate(show, scheduled_update=scheduled_update)
            queueItemObj = QueueItemForceUpdate(show, scheduled_update=scheduled_update, **kwargs)

        self.add_item(queueItemObj)

        return queueItemObj

    def refreshShow(self, show, force=False, scheduled_update=False, after_update=False):
    def refreshShow(self, show, force=False, scheduled_update=False, after_update=False,
                    priority=generic_queue.QueuePriorities.HIGH, **kwargs):

        if self.isBeingRefreshed(show) and not force:
            raise exceptions.CantRefreshException('This show is already being refreshed, not refreshing again.')

@ -142,7 +149,7 @@ class ShowQueue(generic_queue.GenericQueue):

                logger.DEBUG)
            return

        queueItemObj = QueueItemRefresh(show, force=force, scheduled_update=scheduled_update)
        queueItemObj = QueueItemRefresh(show, force=force, scheduled_update=scheduled_update, priority=priority, **kwargs)

        self.add_item(queueItemObj)

@ -458,6 +465,9 @@ class QueueItemAdd(ShowQueueItem):

        self.show.flushEpisodes()

        # load ids
        self.show.ids

        # if sickbeard.USE_TRAKT:
        #     # if there are specific episodes that need to be added by trakt
        #     sickbeard.traktCheckerScheduler.action.manageNewShow(self.show)

@ -485,15 +495,17 @@ class QueueItemAdd(ShowQueueItem):


class QueueItemRefresh(ShowQueueItem):
    def __init__(self, show=None, force=False, scheduled_update=False):
    def __init__(self, show=None, force=False, scheduled_update=False, priority=generic_queue.QueuePriorities.HIGH, **kwargs):
        ShowQueueItem.__init__(self, ShowQueueActions.REFRESH, show, scheduled_update)

        # do refreshes first because they're quick
        self.priority = generic_queue.QueuePriorities.HIGH
        self.priority = priority

        # force refresh certain items
        self.force = force

        self.kwargs = kwargs

    def run(self):
        ShowQueueItem.run(self)

@ -509,6 +521,8 @@ class QueueItemRefresh(ShowQueueItem):

        if self.show.indexerid in sickbeard.scene_exceptions.xem_ids_list[self.show.indexer]:
            sickbeard.scene_numbering.xem_refresh(self.show.indexerid, self.show.indexer)

        if 'pausestatus_after' in self.kwargs and self.kwargs['pausestatus_after'] is not None:
            self.show.paused = self.kwargs['pausestatus_after']
        self.inProgress = False

@ -568,10 +582,11 @@ class QueueItemSubtitle(ShowQueueItem):


class QueueItemUpdate(ShowQueueItem):
    def __init__(self, show=None, scheduled_update=False):
    def __init__(self, show=None, scheduled_update=False, **kwargs):
        ShowQueueItem.__init__(self, ShowQueueActions.UPDATE, show, scheduled_update)
        self.force = False
        self.force_web = False
        self.kwargs = kwargs

    def run(self):

@ -642,18 +657,24 @@ class QueueItemUpdate(ShowQueueItem):

        except exceptions.EpisodeDeletedException:
            pass

        sickbeard.showQueueScheduler.action.refreshShow(self.show, self.force, self.scheduled_update, after_update=True)
        if self.priority != generic_queue.QueuePriorities.NORMAL:
            self.kwargs['priority'] = self.priority
        sickbeard.showQueueScheduler.action.refreshShow(self.show, self.force, self.scheduled_update, after_update=True,
                                                        **self.kwargs)


class QueueItemForceUpdate(QueueItemUpdate):
    def __init__(self, show=None, scheduled_update=False):
    def __init__(self, show=None, scheduled_update=False, **kwargs):
        ShowQueueItem.__init__(self, ShowQueueActions.FORCEUPDATE, show, scheduled_update)
        self.force = True
        self.force_web = False
        self.kwargs = kwargs


class QueueItemForceUpdateWeb(QueueItemUpdate):
    def __init__(self, show=None, scheduled_update=False):
    def __init__(self, show=None, scheduled_update=False, priority=generic_queue.QueuePriorities.NORMAL, **kwargs):
        ShowQueueItem.__init__(self, ShowQueueActions.FORCEUPDATE, show, scheduled_update)
        self.force = True
        self.force_web = True
        self.priority = priority
        self.kwargs = kwargs
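A stripped-down sketch of the kwargs hand-off above: an update item that ran at raised priority passes that priority (and values like pausestatus_after) through to the refresh it queues when it finishes. Class and field names here are invented stand-ins, not the real queue API.

# Sketch: propagate priority and pausestatus_after from update to refresh.
class MiniQueue(object):
    def refreshShow(self, show, priority='NORMAL', **kwargs):
        print('refresh %s at %s with %s' % (show, priority, kwargs))

NORMAL, VERYHIGH = 'NORMAL', 'VERYHIGH'
update_priority, update_kwargs = VERYHIGH, {'pausestatus_after': False}
if update_priority != NORMAL:
    update_kwargs['priority'] = update_priority
MiniQueue().refreshShow('Example Show', **update_kwargs)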
@ -30,7 +30,8 @@ from sickbeard import db

from sickbeard import network_timezones
from sickbeard import failed_history

class ShowUpdater():

class ShowUpdater:
    def __init__(self):
        self.amActive = False

@ -58,49 +59,61 @@ class ShowUpdater():

        # clear the data of unused providers
        sickbeard.helpers.clear_unused_providers()

        # add missing mapped ids
        if not sickbeard.background_mapping_task.is_alive():
            logger.log(u'Updating the Indexer mappings')
            import threading
            sickbeard.background_mapping_task = threading.Thread(
                name='LOAD-MAPPINGS', target=sickbeard.indexermapper.load_mapped_ids, kwargs={'update': True})
            sickbeard.background_mapping_task.start()

        logger.log(u'Doing full update on all shows')

        # clean out cache directory, remove everything > 12 hours old
        sickbeard.helpers.clearCache()

        # select 10 'Ended' tv_shows updated more than 90 days ago and all shows not updated more than 180 days ago to include in this update
        # select 10 'Ended' tv_shows updated more than 90 days ago
        # and all shows not updated more than 180 days ago to include in this update
        stale_should_update = []
        stale_update_date = (update_date - datetime.timedelta(days=90)).toordinal()
        stale_update_date_max = (update_date - datetime.timedelta(days=180)).toordinal()

        # last_update_date <= 90 days, sorted ASC because dates are ordinal
        myDB = db.DBConnection()
        sql_results = myDB.mass_action([[
            'SELECT indexer_id FROM tv_shows WHERE last_update_indexer <= ? AND last_update_indexer >= ? ORDER BY last_update_indexer ASC LIMIT 10;',
            [stale_update_date, stale_update_date_max]], ['SELECT indexer_id FROM tv_shows WHERE last_update_indexer < ?;', [stale_update_date_max]]])
        my_db = db.DBConnection()
        sql_results = my_db.mass_action([
            ['SELECT indexer_id FROM tv_shows WHERE last_update_indexer <= ? AND ' +
             'last_update_indexer >= ? ORDER BY last_update_indexer ASC LIMIT 10;',
             [stale_update_date, stale_update_date_max]],
            ['SELECT indexer_id FROM tv_shows WHERE last_update_indexer < ?;', [stale_update_date_max]]])

        for sql_result in sql_results:
            for cur_result in sql_result:
                stale_should_update.append(int(cur_result['indexer_id']))

        # start update process
        piList = []
        pi_list = []
        for curShow in sickbeard.showList:

            try:
                # get next episode airdate
                curShow.nextEpisode()

                # if should_update returns True (not 'Ended') or show is selected stale 'Ended' then update, otherwise just refresh
                # if should_update returns True (not 'Ended') or show is selected stale 'Ended' then update,
                # otherwise just refresh
                if curShow.should_update(update_date=update_date) or curShow.indexerid in stale_should_update:
                    curQueueItem = sickbeard.showQueueScheduler.action.updateShow(curShow, scheduled_update=True)  # @UndefinedVariable
                    cur_queue_item = sickbeard.showQueueScheduler.action.updateShow(curShow, scheduled_update=True)
                else:
                    logger.log(
                        u'Not updating episodes for show ' + curShow.name + ' because it\'s marked as ended and last/next episode is not within the grace period.',
                        logger.DEBUG)
                    curQueueItem = sickbeard.showQueueScheduler.action.refreshShow(curShow, True, True)  # @UndefinedVariable
                        u'Not updating episodes for show ' + curShow.name + ' because it\'s marked as ended and ' +
                        'last/next episode is not within the grace period.', logger.DEBUG)
                    cur_queue_item = sickbeard.showQueueScheduler.action.refreshShow(curShow, True, True)

                piList.append(curQueueItem)
                pi_list.append(cur_queue_item)

            except (exceptions.CantUpdateException, exceptions.CantRefreshException) as e:
                logger.log(u'Automatic update failed: ' + ex(e), logger.ERROR)

        ui.ProgressIndicators.setIndicator('dailyUpdate', ui.QueueProgressIndicator('Daily Update', piList))
        ui.ProgressIndicators.setIndicator('dailyUpdate', ui.QueueProgressIndicator('Daily Update', pi_list))

        logger.log(u'Added all shows to show queue for full update')
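A standalone sketch of the stale-show selection windows above, using ordinal dates: a batch of up to ten 'Ended' shows untouched for 90-180 days, plus everything older than 180 days. The rows are invented for illustration.

# Sketch: pick stale shows for this update cycle.
import datetime

update_date = datetime.date.today()
stale_update_date = (update_date - datetime.timedelta(days=90)).toordinal()
stale_update_date_max = (update_date - datetime.timedelta(days=180)).toordinal()

# rows are (indexer_id, last_update_indexer) pairs; values invented
rows = [(101, (update_date - datetime.timedelta(days=100)).toordinal()),
        (102, (update_date - datetime.timedelta(days=10)).toordinal()),
        (103, (update_date - datetime.timedelta(days=200)).toordinal())]

batch = sorted((r for r in rows if stale_update_date_max <= r[1] <= stale_update_date),
               key=lambda r: r[1])[:10]                      # the LIMIT 10 window
overdue = [r for r in rows if r[1] < stale_update_date_max]  # always included
print([r[0] for r in batch], [r[0] for r in overdue])        # -> [101] [103]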
190
sickbeard/tv.py

@ -1,4 +1,4 @@

# Author: Nic Wolfe <nic@wolfeden.ca>
# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickGear.

@ -34,6 +34,7 @@ import xml.etree.cElementTree as etree

from name_parser.parser import NameParser, InvalidNameException, InvalidShowException

from lib import subliminal
import fnmatch

try:
    from lib.send2trash import send2trash

@ -43,7 +44,7 @@ except ImportError:

from lib.imdb import imdb

from sickbeard import db
from sickbeard import helpers, exceptions, logger, name_cache
from sickbeard import helpers, exceptions, logger, name_cache, indexermapper
from sickbeard.exceptions import ex
from sickbeard import image_cache
from sickbeard import notifiers

@ -52,6 +53,8 @@ from sickbeard import subtitles

from sickbeard import history
from sickbeard import network_timezones
from sickbeard.blackandwhitelist import BlackAndWhiteList
from sickbeard.indexermapper import del_mapping, save_mapping, MapStatus
from sickbeard.generic_queue import QueuePriorities

from sickbeard import encodingKludge as ek

@ -101,6 +104,7 @@ class TVShow(object):

        self._rls_require_words = ''
        self._overview = ''
        self._tag = ''
        self._mapped_ids = {}

        self.dirty = True

@ -147,6 +151,28 @@ class TVShow(object):

    overview = property(lambda self: self._overview, dirty_setter('_overview'))
    tag = property(lambda self: self._tag, dirty_setter('_tag'))

    @property
    def ids(self):
        if not self._mapped_ids:
            acquired_lock = self.lock.acquire(False)
            if acquired_lock:
                try:
                    indexermapper.map_indexers_to_show(self)
                finally:
                    self.lock.release()
        return self._mapped_ids

    @ids.setter
    def ids(self, value):
        if isinstance(value, dict):
            for k, v in value.iteritems():
                if k not in sickbeard.indexermapper.indexer_list or not isinstance(v, dict) or \
                        not isinstance(v.get('id'), (int, long)) or not isinstance(v.get('status'), (int, long)) or \
                        v.get('status') not in indexermapper.MapStatus.allstatus or \
                        not isinstance(v.get('date'), datetime.date):
                    return
            self._mapped_ids = value
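A sketch of the lazy, non-blocking property pattern used above: the mapping is computed at most once, and if another thread already holds the lock the current (possibly empty) value is returned rather than blocking, for example during a page render. Names here are illustrative.

# Sketch: lazy id mapping guarded by a non-blocking lock acquire.
import threading

class Show(object):
    def __init__(self):
        self.lock = threading.Lock()
        self._mapped_ids = {}

    @property
    def ids(self):
        if not self._mapped_ids:
            if self.lock.acquire(False):  # non-blocking acquire
                try:
                    self._mapped_ids = {1: {'id': 101}}  # stands in for map_indexers_to_show()
                finally:
                    self.lock.release()
        return self._mapped_ids

print(Show().ids)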
    @property
    def is_anime(self):
        if int(self.anime) > 0:

@ -846,7 +872,8 @@ class TVShow(object):

        if not self.tag:
            self.tag = 'Show List'

        logger.log('%s: Show info [%s] loaded from database' % (self.indexerid, self.name))
        logger.log('Loaded.. {: <9} {: <8} {}'.format(
            sickbeard.indexerApi(self.indexer).config.get('name') + ',', str(self.indexerid) + ',', self.name))

        # Get IMDb_info from database
        myDB = db.DBConnection()

@ -855,7 +882,8 @@ class TVShow(object):

        if 0 < len(sqlResults):
            self.imdb_info = dict(zip(sqlResults[0].keys(), sqlResults[0]))
        elif sickbeard.USE_IMDB_INFO:
            logger.log('%s: Unable to find IMDb show info in the database for [%s]' % (self.indexerid, self.name))
            logger.log('%s: The next show update will attempt to find IMDb info for [%s]' %
                       (self.indexerid, self.name), logger.DEBUG)
            return

        self.dirty = False

@ -931,10 +959,10 @@ class TVShow(object):

    def _get_imdb_info(self):

        if not self.imdbid:
        if not self.imdbid and self.ids.get(indexermapper.INDEXER_IMDB, {'id': 0}).get('id', 0) <= 0:
            return

        imdb_info = {'imdb_id': self.imdbid,
        imdb_info = {'imdb_id': self.imdbid or 'tt%07d' % self.ids[indexermapper.INDEXER_IMDB]['id'],
                     'title': '',
                     'year': '',
                     'akas': [],

@ -948,7 +976,7 @@ class TVShow(object):

                     'last_update': ''}

        i = imdb.IMDb()
        imdbTv = i.get_movie(str(re.sub('[^0-9]', '', self.imdbid)))
        imdbTv = i.get_movie(str(re.sub('[^0-9]', '', self.imdbid or '%07d' % self.ids[indexermapper.INDEXER_IMDB]['id'])))

        for key in filter(lambda x: x.replace('_', ' ') in imdbTv.keys(), imdb_info.keys()):
            # Store only the first value for string type

@ -1045,7 +1073,9 @@ class TVShow(object):

        # clear the cache
        image_cache_dir = ek.ek(os.path.join, sickbeard.CACHE_DIR, 'images')
        for cache_file in ek.ek(glob.glob, ek.ek(os.path.join, image_cache_dir, str(self.indexerid) + '.*')):
        for path, dirs, files in ek.ek(os.walk, image_cache_dir):
            for filename in ek.ek(fnmatch.filter, files, '%s.*' % self.indexerid):
                cache_file = ek.ek(os.path.join, path, filename)
                logger.log('Attempt to %s cache file %s' % (action, cache_file))
                try:
                    if sickbeard.TRASH_REMOVE_SHOW:
@ -1171,6 +1201,48 @@ class TVShow(object):

            logger.log('Error occurred when downloading subtitles: %s' % traceback.format_exc(), logger.DEBUG)
            return

    def switchIndexer(self, old_indexer, old_indexerid, pausestatus_after=None):
        myDB = db.DBConnection()
        myDB.mass_action([['UPDATE tv_shows SET indexer = ?, indexer_id = ? WHERE indexer = ? AND indexer_id = ?',
                           [self.indexer, self.indexerid, old_indexer, old_indexerid]],
                          ['UPDATE tv_episodes SET showid = ?, indexer = ?, indexerid = 0 WHERE indexer = ? AND showid = ?',
                           [self.indexerid, self.indexer, old_indexer, old_indexerid]],
                          ['UPDATE blacklist SET show_id = ? WHERE show_id = ?', [self.indexerid, old_indexerid]],
                          ['UPDATE history SET showid = ? WHERE showid = ?', [self.indexerid, old_indexerid]],
                          ['UPDATE imdb_info SET indexer_id = ? WHERE indexer_id = ?', [self.indexerid, old_indexerid]],
                          ['UPDATE scene_exceptions SET indexer_id = ? WHERE indexer_id = ?', [self.indexerid, old_indexerid]],
                          ['UPDATE scene_numbering SET indexer = ?, indexer_id = ? WHERE indexer = ? AND indexer_id = ?',
                           [self.indexer, self.indexerid, old_indexer, old_indexerid]],
                          ['UPDATE whitelist SET show_id = ? WHERE show_id = ?', [self.indexerid, old_indexerid]],
                          ['UPDATE xem_refresh SET indexer = ?, indexer_id = ? WHERE indexer = ? AND indexer_id = ?',
                           [self.indexer, self.indexerid, old_indexer, old_indexerid]]])

        myFailedDB = db.DBConnection('failed.db')
        myFailedDB.action('UPDATE history SET showid = ? WHERE showid = ?', [self.indexerid, old_indexerid])
        del_mapping(old_indexer, old_indexerid)
        self.ids[old_indexer]['status'] = MapStatus.NONE
        self.ids[self.indexer]['status'] = MapStatus.SOURCE
        save_mapping(self)
        name_cache.remove_from_namecache(old_indexerid)

        image_cache_dir = ek.ek(os.path.join, sickbeard.CACHE_DIR, 'images')
        for path, dirs, files in ek.ek(os.walk, image_cache_dir):
            for filename in ek.ek(fnmatch.filter, files, '%s.*' % old_indexerid):
                cache_file = ek.ek(os.path.join, path, filename)
                new_cachefile = ek.ek(os.path.join, path, filename.replace(str(old_indexerid), str(self.indexerid)))
                try:
                    helpers.moveFile(cache_file, new_cachefile)
                except Exception as e:
                    logger.log('Unable to rename %s to %s: %s / %s' % (cache_file, new_cachefile, repr(e), str(e)), logger.WARNING)

        name_cache.buildNameCache(self)

        # force the update
        try:
            sickbeard.showQueueScheduler.action.updateShow(
                self, force=True, web=True, priority=QueuePriorities.VERYHIGH, pausestatus_after=pausestatus_after)
        except exceptions.CantUpdateException as e:
            logger.log('Unable to update this show. %s' % ex(e), logger.ERROR)
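The id migration above rewrites every table keyed on the old show id in one batch. A sketch of the same pattern using sqlite3 directly, with a trimmed table set and invented ids:

# Sketch: batch id migration in one transaction, mirroring mass_action.
import sqlite3

con = sqlite3.connect(':memory:')
con.execute('CREATE TABLE history (showid INTEGER)')
con.execute('INSERT INTO history VALUES (252)')  # old master id, invented

old_id, new_id = 252, 80348  # e.g. switching the master id between info sources
with con:  # one transaction, all-or-nothing like mass_action
    for sql, args in [('UPDATE history SET showid = ? WHERE showid = ?', [new_id, old_id])]:
        con.execute(sql, args)
print(con.execute('SELECT showid FROM history').fetchall())  # -> [(80348,)]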
    def saveToDB(self, forceSave=False):


@ -1649,9 +1721,9 @@ class TVEpisode(object):

    def loadFromIndexer(self, season=None, episode=None, cache=True, tvapi=None, cachedSeason=None, update=False):

        if season is None:
        if None is season:
            season = self.season
        if episode is None:
        if None is episode:
            episode = self.episode

        logger.log('%s: Loading episode details from %s for episode %sx%s' %

@ -1660,8 +1732,8 @@ class TVEpisode(object):

        indexer_lang = self.show.lang

        try:
            if cachedSeason is None:
                if tvapi is None:
            if None is cachedSeason:
                if None is tvapi:
                    lINDEXER_API_PARMS = sickbeard.indexerApi(self.indexer).api_params.copy()

                    if not cache:

@ -1670,7 +1742,7 @@ class TVEpisode(object):

                    if indexer_lang:
                        lINDEXER_API_PARMS['language'] = indexer_lang

                    if self.show.dvdorder != 0:
                    if 0 != self.show.dvdorder:
                        lINDEXER_API_PARMS['dvdorder'] = True

                    t = sickbeard.indexerApi(self.indexer).indexer(**lINDEXER_API_PARMS)

@ -1695,27 +1767,27 @@ class TVEpisode(object):

            logger.log('Unable to find the episode on %s... has it been removed? Should I delete from db?' %
                       sickbeard.indexerApi(self.indexer).name, logger.DEBUG)
            # if I'm no longer on the Indexers but I once was then delete myself from the DB
            if self.indexerid != -1:
            if -1 != self.indexerid:
                self.deleteEpisode()
            return

        if not sickbeard.ALLOW_INCOMPLETE_SHOWDATA and getattr(myEp, 'episodename', None) is None:
        if not sickbeard.ALLOW_INCOMPLETE_SHOWDATA and None is getattr(myEp, 'episodename', None):
            logger.log('This episode (%s - %sx%s) has no name on %s' %
                       (self.show.name, season, episode, sickbeard.indexerApi(self.indexer).name))
            # if I'm incomplete on TVDB but I once was complete then just delete myself from the DB for now
            if self.indexerid != -1:
            if -1 != self.indexerid:
                self.deleteEpisode()
            return False

        if getattr(myEp, 'absolute_number', None) is None:
        if None is getattr(myEp, 'absolute_number', None):
            logger.log('This episode (%s - %sx%s) has no absolute number on %s' %
                       (self.show.name, season, episode, sickbeard.indexerApi(self.indexer).name), logger.DEBUG)
        else:
            logger.log("%s: The absolute_number for %sx%s is : %s" %
                       (self.show.indexerid, season, episode, myEp["absolute_number"]), logger.DEBUG)
            self.absolute_number = int(myEp["absolute_number"])
            logger.log('%s: The absolute_number for %sx%s is : %s' %
                       (self.show.indexerid, season, episode, myEp['absolute_number']), logger.DEBUG)
            self.absolute_number = int(myEp['absolute_number'])

        self.name = getattr(myEp, 'episodename', "")
        self.name = getattr(myEp, 'episodename', '')
        self.season = season
        self.episode = episode


@ -1733,12 +1805,12 @@ class TVEpisode(object):

            self.season, self.episode
        )

        self.description = getattr(myEp, 'overview', "")
        self.description = getattr(myEp, 'overview', '')

        firstaired = getattr(myEp, 'firstaired', None)
        if firstaired is None or firstaired in "0000-00-00":
        if None is firstaired or firstaired in '0000-00-00':
            firstaired = str(datetime.date.fromordinal(1))
        rawAirdate = [int(x) for x in firstaired.split("-")]
        rawAirdate = [int(x) for x in firstaired.split('-')]

        old_airdate_future = self.airdate == datetime.date.fromordinal(1) or self.airdate >= datetime.date.today()
        try:

@ -1747,15 +1819,15 @@ class TVEpisode(object):

            logger.log('Malformed air date retrieved from %s (%s - %sx%s)' %
                       (sickbeard.indexerApi(self.indexer).name, self.show.name, season, episode), logger.ERROR)
            # if I'm incomplete on TVDB but I once was complete then just delete myself from the DB for now
            if self.indexerid != -1:
            if -1 != self.indexerid:
                self.deleteEpisode()
            return False

        # early conversion to int so that episode doesn't get marked dirty
        self.indexerid = getattr(myEp, 'id', None)
        if self.indexerid is None:
        if None is self.indexerid:
            logger.log('Failed to retrieve ID from %s' % sickbeard.indexerApi(self.indexer).name, logger.ERROR)
            if self.indexerid != -1:
            if -1 != self.indexerid:
                self.deleteEpisode()
            return False
@ -1774,56 +1846,60 @@ class TVEpisode(object):

        if not ek.ek(os.path.isfile, self.location):

            today = datetime.date.today()
            future_airtime = self.airdate > today + datetime.timedelta(days=1) or \
                (not self.airdate < today - datetime.timedelta(days=1) and
                 network_timezones.parse_date_time(self.airdate.toordinal(), self.show.airs, self.show.network) +
                 datetime.timedelta(minutes=helpers.tryInt(self.show.runtime, 60)) > datetime.datetime.now(network_timezones.sb_timezone))
            delta = datetime.timedelta(days=1)
            show_time = network_timezones.parse_date_time(self.airdate.toordinal(), self.show.airs, self.show.network)
            show_length = datetime.timedelta(minutes=helpers.tryInt(self.show.runtime, 60))
            tz_now = datetime.datetime.now(network_timezones.sb_timezone)
            future_airtime = (self.airdate > (today + delta) or
                              (not self.airdate < (today - delta) and ((show_time + show_length) > tz_now)))

            # if it hasn't aired yet set the status to UNAIRED
            # if this episode hasn't aired yet set the status to UNAIRED
            if future_airtime and self.status in [SKIPPED, UNAIRED, UNKNOWN, WANTED]:
                logger.log('Episode airs in the future, marking it %s' % statusStrings[UNAIRED], logger.DEBUG)
                msg = 'Episode airs in the future, marking it %s'
                self.status = UNAIRED

            # if there's no airdate then set it to skipped (and respect ignored)
            # if there's no airdate then set it to unaired (and respect ignored)
            elif self.airdate == datetime.date.fromordinal(1):
                if self.status == IGNORED:
                    logger.log('Episode has no air date, but it\'s already marked as ignored', logger.DEBUG)
                if IGNORED == self.status:
                    msg = 'Episode has no air date and marked %s, no change'
                else:
                    logger.log('Episode has no air date, automatically marking it skipped', logger.DEBUG)
                    self.status = SKIPPED
                    msg = 'Episode has no air date, marking it %s'
                    self.status = UNAIRED

            # if we don't have the file and the airdate is in the past
            # if the airdate is in the past
            else:
                if self.status == UNAIRED:
                    if 0 < self.season:
                        self.status = WANTED
                    else:
                        self.status = SKIPPED
                if UNAIRED == self.status:
                    msg = ('Episode status %s%s, with air date in the past, marking it ' % (
                        statusStrings[self.status], ','.join([(' is a special', '')[0 < self.season],
                                                              ('', ' is paused')[self.show.paused]])) + '%s')
                    self.status = (SKIPPED, WANTED)[0 < self.season and not self.show.paused]

                # if we somehow are still UNKNOWN then just skip it
                elif self.status == UNKNOWN or (old_airdate_future and self.status == SKIPPED):
                    if update and not self.show.paused and 0 < self.season:
                        self.status = WANTED
                    else:
                        self.status = SKIPPED
                # if still UNKNOWN or SKIPPED with the deprecated future airdate method
                elif UNKNOWN == self.status or (SKIPPED == self.status and old_airdate_future):
                    msg = ('Episode status %s%s, with air date in the past, marking it ' % (
                        statusStrings[self.status], ','.join([
                            ('', ' has old future date format')[SKIPPED == self.status and old_airdate_future],
                            ('', ' is being updated')[bool(update)], (' is a special', '')[0 < self.season]])) + '%s')
                    self.status = (SKIPPED, WANTED)[update and not self.show.paused and 0 < self.season]

                else:
                    logger.log(
                        'Not touching status because we have no episode file, the airdate is in the past, and the status is %s' %
                        statusStrings[self.status], logger.DEBUG)
                    msg = 'Not touching episode status %s, with air date in the past, because there is no file'

            logger.log(msg % statusStrings[self.status], logger.DEBUG)

        # if we have a media file then it's downloaded
        elif sickbeard.helpers.has_media_ext(self.location):
            # leave propers alone, you have to either post-process them or manually change them back
            if self.status not in Quality.SNATCHED_PROPER + Quality.DOWNLOADED + Quality.SNATCHED + [ARCHIVED]:
                status_quality = Quality.statusFromNameOrFile(self.location, anime=self.show.is_anime)
                logger.log('(1) Status changes from %s to %s' % (self.status, status_quality), logger.DEBUG)
                self.status = status_quality
                msg = '(1) Status changes from %s to ' % statusStrings[self.status]
                self.status = Quality.statusFromNameOrFile(self.location, anime=self.show.is_anime)
                logger.log('%s%s' % (msg, statusStrings[self.status]), logger.DEBUG)

        # shouldn't get here probably
        else:
            logger.log('(2) Status changes from %s to %s' % (statusStrings[self.status], statusStrings[UNKNOWN]), logger.DEBUG)
            msg = '(2) Status changes from %s to ' % statusStrings[self.status]
            self.status = UNKNOWN
            logger.log('%s%s' % (msg, statusStrings[self.status]), logger.DEBUG)
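A standalone sketch of the future_airtime test above: an episode is treated as future if it airs after tomorrow, or airs around today but its end time (parsed air time plus runtime) is still ahead of the network-timezone clock. Values here are invented; local time stands in for now(sb_timezone).

# Sketch: is this episode still airing/unaired?
import datetime

today = datetime.date.today()
delta = datetime.timedelta(days=1)
airdate = today                                                     # airs today
show_time = datetime.datetime.now() + datetime.timedelta(hours=2)   # parsed air date-time
show_length = datetime.timedelta(minutes=60)
tz_now = datetime.datetime.now()

future_airtime = (airdate > (today + delta) or
                  (not airdate < (today - delta) and ((show_time + show_length) > tz_now)))
print(future_airtime)  # -> True: the episode has not finished airing yet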
    def loadFromNFO(self, location):


@ -27,7 +27,7 @@ from sickbeard import logger

from sickbeard.common import Quality

from sickbeard import helpers, show_name_helpers
from sickbeard.exceptions import AuthException, ex
from sickbeard.exceptions import MultipleShowObjectsException, AuthException, ex
from name_parser.parser import NameParser, InvalidNameException, InvalidShowException
from sickbeard.rssfeeds import RSSFeeds
import itertools

@ -76,7 +76,7 @@ class TVCache:

    def _checkItemAuth(self, title, url):
        return True

    def updateCache(self):
    def updateCache(self, **kwargs):
        try:
            self._checkAuth()
        except AuthException as e:

@ -188,7 +188,7 @@ class TVCache:

        # if recent search hasn't used our previous results yet then don't clear the cache
        return self.lastSearch >= self.lastUpdate

    def add_cache_entry(self, name, url, parse_result=None, indexer_id=0):
    def add_cache_entry(self, name, url, parse_result=None, indexer_id=0, id_dict=None):

        # check if we passed in a parsed result or should we try and create one
        if not parse_result:

@ -196,7 +196,16 @@ class TVCache:

            # create showObj from indexer_id if available
            showObj = None
            if indexer_id:
                try:
                    showObj = helpers.findCertainShow(sickbeard.showList, indexer_id)
                except MultipleShowObjectsException:
                    return None

            if id_dict:
                try:
                    showObj = helpers.find_show_by_id(sickbeard.showList, id_dict=id_dict, no_mapped_ids=False)
                except MultipleShowObjectsException:
                    return None

            try:
                np = NameParser(showObj=showObj, convert=True)
@ -39,6 +39,7 @@ from sickbeard.exceptions import ex

from sickbeard.common import SNATCHED, SNATCHED_PROPER, DOWNLOADED, SKIPPED, UNAIRED, IGNORED, ARCHIVED, WANTED, UNKNOWN
from sickbeard.helpers import remove_article
from common import Quality, qualityPresetStrings, statusStrings
from sickbeard.indexers.indexer_config import *
from sickbeard.webserve import MainHandler

try:

@ -1096,7 +1097,7 @@ class CMD_Exceptions(ApiCall):

    def run(self):
        """ display scene exceptions for all or a given show """
        myDB = db.DBConnection("cache.db", row_type="dict")
        myDB = db.DBConnection(row_type="dict")

        if self.indexerid == None:
            sqlResults = myDB.select("SELECT show_name, indexer_id AS 'indexerid' FROM scene_exceptions")

@ -1411,7 +1412,7 @@ class CMD_SickBeardCheckScheduler(ApiCall):

        backlogPaused = sickbeard.searchQueueScheduler.action.is_backlog_paused()  # @UndefinedVariable
        backlogRunning = sickbeard.searchQueueScheduler.action.is_backlog_in_progress()  # @UndefinedVariable
        nextBacklog = sickbeard.backlogSearchScheduler.nextRun().strftime(dateFormat).decode(sickbeard.SYS_ENCODING)
        nextBacklog = sickbeard.backlogSearchScheduler.next_run().strftime(dateFormat).decode(sickbeard.SYS_ENCODING)

        data = {"backlog_is_paused": int(backlogPaused), "backlog_is_running": int(backlogRunning),
                "last_backlog": _ordinal_to_dateForm(sqlResults[0]["last_backlog"]),

@ -1819,7 +1820,7 @@ class CMD_Show(ApiCall):

        # clean up tvdb horrible airs field
        showDict["airs"] = str(showObj.airs).replace('am', ' AM').replace('pm', ' PM').replace('  ', ' ')
        showDict["indexerid"] = self.indexerid
        showDict["tvrage_id"] = helpers.mapIndexersToShow(showObj)[2]
        showDict["tvrage_id"] = showObj.ids.get(INDEXER_TVRAGE, {'id': 0})['id']
        showDict["tvrage_name"] = showObj.name
        showDict["network"] = showObj.network
        if not showDict["network"]:

@ -2592,8 +2593,8 @@ class CMD_Shows(ApiCall):

                "sports": curShow.sports,
                "anime": curShow.anime,
                "indexerid": curShow.indexerid,
                "tvdbid": helpers.mapIndexersToShow(curShow)[1],
                "tvrage_id": helpers.mapIndexersToShow(curShow)[2],
                "tvdbid": curShow.ids.get(INDEXER_TVDB, {'id': 0})['id'],
                "tvrage_id": curShow.ids.get(INDEXER_TVRAGE, {'id': 0})['id'],
                "tvrage_name": curShow.name,
                "network": curShow.network,
                "show_name": curShow.name,
@ -54,7 +54,8 @@ from sickbeard.scene_numbering import get_scene_numbering, set_scene_numbering,

from sickbeard.name_cache import buildNameCache
from sickbeard.browser import foldersAtPath
from sickbeard.blackandwhitelist import BlackAndWhiteList, short_group_names
from sickbeard.search_backlog import FULL_BACKLOG, LIMITED_BACKLOG
from sickbeard.search_backlog import FORCED_BACKLOG
from sickbeard.indexermapper import MapStatus, save_mapping, map_indexers_to_show
from tornado import gen
from tornado.web import RequestHandler, StaticFileHandler, authenticated
from lib import adba

@ -1370,11 +1371,128 @@ class Home(MainHandler):

        out.append('S' + str(season) + ': ' + ', '.join(names))
        return '<br/>'.join(out)

    def switchIndexer(self, indexerid, indexer, mindexerid, mindexer, set_pause=False, mark_wanted=False):
        indexer = helpers.tryInt(indexer)
        indexerid = helpers.tryInt(indexerid)
        mindexer = helpers.tryInt(mindexer)
        mindexerid = helpers.tryInt(mindexerid)
        show_obj = sickbeard.helpers.find_show_by_id(
            sickbeard.showList, {indexer: indexerid}, no_mapped_ids=True)
        try:
            m_show_obj = sickbeard.helpers.find_show_by_id(
                sickbeard.showList, {mindexer: mindexerid}, no_mapped_ids=False)
        except exceptions.MultipleShowObjectsException:
            msg = 'Duplicate shows in DB'
            ui.notifications.message('Indexer Switch', 'Error: ' + msg)
            return {'Error': msg}
        if not show_obj or (m_show_obj and show_obj is not m_show_obj):
            msg = 'Unable to find the specified show'
            ui.notifications.message('Indexer Switch', 'Error: ' + msg)
            return {'Error': msg}

        with show_obj.lock:
            show_obj.indexer = mindexer
            show_obj.indexerid = mindexerid
            pausestatus_after = None
            if not set_pause:
                show_obj.paused = False
                if not mark_wanted:
                    show_obj.paused = True
                    pausestatus_after = False
            elif not show_obj.paused:
                show_obj.paused = True

            show_obj.switchIndexer(indexer, indexerid, pausestatus_after=pausestatus_after)

        ui.notifications.message('Indexer Switch', 'Finished after updating the show')
        return {'Success': 'Switched to new TV info source'}
    def saveMapping(self, show, **kwargs):
        show_obj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(show))
        response = {}
        if not show_obj:
            return json.dumps(response)
        new_ids = {}
        save_map = []
        with show_obj.lock:
            for k, v in kwargs.iteritems():
                t = re.search(r'mid-(\d+)', k)
                if t:
                    i = helpers.tryInt(v, None)
                    if None is not i:
                        new_ids.setdefault(helpers.tryInt(t.group(1)), {'id': 0, 'status': MapStatus.NONE,
                                                                        'date': datetime.date.fromordinal(1)})['id'] = i
                else:
                    t = re.search(r'lockid-(\d+)', k)
                    if t:
                        new_ids.setdefault(helpers.tryInt(t.group(1)), {'id': 0, 'status': MapStatus.NONE, 'date':
                            datetime.date.fromordinal(1)})['status'] = (MapStatus.NONE, MapStatus.NO_AUTOMATIC_CHANGE)[
                            'true' == v]
            if new_ids:
                for k, v in new_ids.iteritems():
                    if None is v.get('id') or None is v.get('status'):
                        continue
                    if (show_obj.ids.get(k, {'id': 0}).get('id') != v.get('id') or
                            (MapStatus.NO_AUTOMATIC_CHANGE == v.get('status') and
                             MapStatus.NO_AUTOMATIC_CHANGE != show_obj.ids.get(
                                k, {'status': MapStatus.NONE}).get('status')) or
                            (MapStatus.NO_AUTOMATIC_CHANGE != v.get('status') and
                             MapStatus.NO_AUTOMATIC_CHANGE == show_obj.ids.get(
                                k, {'status': MapStatus.NONE}).get('status'))):
                        show_obj.ids[k]['id'] = (0, v['id'])[v['id'] >= 0]
                        show_obj.ids[k]['status'] = (MapStatus.NOT_FOUND, v['status'])[v['id'] != 0]
                        save_map.append(k)
            if len(save_map):
                save_mapping(show_obj, save_map=save_map)
                ui.notifications.message('Mappings saved')
            else:
                ui.notifications.message('Mappings unchanged, not saving.')

        master_ids = [show] + [kwargs.get(x) for x in 'indexer', 'mindexerid', 'mindexer']
        if all([helpers.tryInt(x) > 0 for x in master_ids]):
            master_ids += [bool(helpers.tryInt(kwargs.get(x))) for x in 'paused', 'markwanted']
            response = {'switch': self.switchIndexer(*master_ids), 'mid': kwargs['mindexerid']}

        response.update({
            'map': {k: {r: w for r, w in v.iteritems() if r != 'date'} for k, v in show_obj.ids.iteritems()}
        })
        return json.dumps(response)
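A standalone sketch of the form-key parsing above: per-indexer inputs arrive as kwargs named 'mid-<indexer>' (an id) and 'lockid-<indexer>' (a lock flag). Keys and values here are invented.

# Sketch: collect per-indexer ids and lock flags from form kwargs.
import re

kwargs = {'mid-1': '80348', 'lockid-1': 'true', 'mid-2': '', 'unrelated': 'x'}
new_ids = {}
for k, v in kwargs.items():
    m = re.search(r'mid-(\d+)', k)
    if m and v.isdigit():
        new_ids.setdefault(int(m.group(1)), {})['id'] = int(v)
    else:
        m = re.search(r'lockid-(\d+)', k)
        if m:
            new_ids.setdefault(int(m.group(1)), {})['locked'] = ('true' == v)
print(new_ids)  # -> {1: {'id': 80348, 'locked': True}}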
    def forceMapping(self, show, **kwargs):
        show_obj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(show))
        if not show_obj:
            return json.dumps({})
        save_map = []
        with show_obj.lock:
            for k, v in kwargs.iteritems():
                t = re.search(r'lockid-(\d+)', k)
                if t:
                    new_status = (MapStatus.NONE, MapStatus.NO_AUTOMATIC_CHANGE)['true' == v]
                    old_status = show_obj.ids.get(helpers.tryInt(t.group(1)), {'status': MapStatus.NONE})['status']
                    if ((MapStatus.NO_AUTOMATIC_CHANGE == new_status and
                         MapStatus.NO_AUTOMATIC_CHANGE != old_status) or
                            (MapStatus.NO_AUTOMATIC_CHANGE != new_status and
                             MapStatus.NO_AUTOMATIC_CHANGE == old_status)):
                        i = helpers.tryInt(t.group(1))
                        if 'mid-%s' % i in kwargs:
                            l = helpers.tryInt(kwargs['mid-%s' % i], None)
                            if None is not l and 0 <= l:
                                show_obj.ids.setdefault(i, {'id': 0, 'status': MapStatus.NONE, 'date':
                                    datetime.date.fromordinal(1)})['id'] = l
                        show_obj.ids.setdefault(i, {'id': 0, 'status': MapStatus.NONE, 'date':
                            datetime.date.fromordinal(1)})['status'] = new_status
                        save_map.append(i)
            if len(save_map):
                save_mapping(show_obj, save_map=save_map)
                map_indexers_to_show(show_obj, force=True)
                ui.notifications.message('Mapping Reloaded')
        return json.dumps({k: {r: w for r, w in v.iteritems() if 'date' != r} for k, v in show_obj.ids.iteritems()})

    def editShow(self, show=None, location=None, anyQualities=[], bestQualities=[], exceptions_list=[],
                 flatten_folders=None, paused=None, directCall=False, air_by_date=None, sports=None, dvdorder=None,
                 indexerLang=None, subtitles=None, archive_firstmatch=None, rls_ignore_words=None,
                 rls_require_words=None, anime=None, blacklist=None, whitelist=None,
                 scene=None, tag=None, quality_preset=None):
                 scene=None, tag=None, quality_preset=None, **kwargs):

        if show is None:
            errString = 'Invalid show ID: ' + str(show)
@ -2161,18 +2279,31 @@ class HomePostProcess(Home):

        return t.respond()

    def processEpisode(self, dir=None, nzbName=None, jobName=None, quiet=None, process_method=None, force=None,
                       force_replace=None, failed='0', type='auto', stream='0', **kwargs):
                       force_replace=None, failed='0', type='auto', stream='0', dupekey=None, **kwargs):

        if not dir and ('0' == failed or not nzbName):
            self.redirect('/home/postprocess/')
        else:
            showIdRegex = re.compile(r'^SickGear-([A-Za-z]*)(\d+)-')
            indexer = 0
            showObj = None
            if dupekey and showIdRegex.search(dupekey):
                m = showIdRegex.match(dupekey)
                istr = m.group(1)
                for i in sickbeard.indexerApi().indexers:
                    if istr == sickbeard.indexerApi(i).config.get('dupekey'):
                        indexer = i
                        break
                showObj = helpers.find_show_by_id(sickbeard.showList, {indexer: int(m.group(2))},
                                                  no_mapped_ids=True)
            result = processTV.processDir(dir.decode('utf-8') if dir else None, nzbName.decode('utf-8') if nzbName else None,
                                          process_method=process_method, type=type,
                                          cleanup='cleanup' in kwargs and kwargs['cleanup'] in ['on', '1'],
                                          force=force in ['on', '1'],
                                          force_replace=force_replace in ['on', '1'],
                                          failed='0' != failed,
                                          webhandler=self.send_message if stream != '0' else None)
                                          webhandler=self.send_message if stream != '0' else None,
                                          showObj=showObj)

            if '0' != stream:
                return
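A standalone sketch of the dupekey parsing above: dupe keys written as 'SickGear-<indexer tag><show id>-...' are mapped back to a show. The tag-to-indexer table and the dupekey value are invented for illustration.

# Sketch: recover (indexer, show id) from a downloader dupe key.
import re

show_id_regex = re.compile(r'^SickGear-([A-Za-z]*)(\d+)-')
indexer_tags = {'tvid': 1, 'rid': 2}  # tag -> indexer number, invented

dupekey = 'SickGear-tvid80348-S01E02'
m = show_id_regex.match(dupekey)
if m:
    indexer = indexer_tags.get(m.group(1), 0)
    show_id = int(m.group(2))
    print(indexer, show_id)  # -> 1 80348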
@ -3925,9 +4056,10 @@ class Manage(MainHandler):

class ManageSearches(Manage):
    def index(self, *args, **kwargs):
        t = PageTemplate(headers=self.request.headers, file='manage_manageSearches.tmpl')
        # t.backlogPI = sickbeard.backlogSearchScheduler.action.getProgressIndicator()
        # t.backlogPI = sickbeard.backlogSearchScheduler.action.get_progress_indicator()
        t.backlogPaused = sickbeard.searchQueueScheduler.action.is_backlog_paused()
        t.backlogRunning = sickbeard.searchQueueScheduler.action.is_backlog_in_progress()
        t.backlogIsActive = sickbeard.backlogSearchScheduler.action.am_running()
        t.standardBacklogRunning = sickbeard.searchQueueScheduler.action.is_standard_backlog_in_progress()
        t.backlogRunningType = sickbeard.searchQueueScheduler.action.type_of_backlog_in_progress()
        t.recentSearchStatus = sickbeard.searchQueueScheduler.action.is_recentsearch_in_progress()

@ -3945,22 +4077,12 @@ class ManageSearches(Manage):

        self.redirect('/home/')

    def forceLimitedBacklog(self, *args, **kwargs):
    def forceBacklog(self, *args, **kwargs):
        # force it to run the next time it looks
        if not sickbeard.searchQueueScheduler.action.is_standard_backlog_in_progress():
            sickbeard.backlogSearchScheduler.forceSearch(force_type=LIMITED_BACKLOG)
            logger.log(u'Limited Backlog search forced')
            ui.notifications.message('Limited Backlog search started')

            time.sleep(5)
            self.redirect('/manage/manageSearches/')

    def forceFullBacklog(self, *args, **kwargs):
        # force it to run the next time it looks
        if not sickbeard.searchQueueScheduler.action.is_standard_backlog_in_progress():
            sickbeard.backlogSearchScheduler.forceSearch(force_type=FULL_BACKLOG)
            logger.log(u'Full Backlog search forced')
            ui.notifications.message('Full Backlog search started')
            sickbeard.backlogSearchScheduler.force_search(force_type=FORCED_BACKLOG)
            logger.log(u'Backlog search forced')
            ui.notifications.message('Backlog search started')

            time.sleep(5)
            self.redirect('/manage/manageSearches/')
@ -4355,8 +4477,8 @@ class ConfigSearch(Config):

    def saveSearch(self, use_nzbs=None, use_torrents=None, nzb_dir=None, sab_username=None, sab_password=None,
                   sab_apikey=None, sab_category=None, sab_host=None, nzbget_username=None, nzbget_password=None,
                   nzbget_category=None, nzbget_priority=None, nzbget_host=None, nzbget_use_https=None,
                   backlog_days=None, backlog_frequency=None, search_unaired=None, recentsearch_frequency=None,
                   nzb_method=None, torrent_method=None, usenet_retention=None,
                   backlog_days=None, backlog_frequency=None, search_unaired=None, unaired_recent_search_only=None,
                   recentsearch_frequency=None, nzb_method=None, torrent_method=None, usenet_retention=None,
                   download_propers=None, check_propers_interval=None, allow_high_priority=None,
                   torrent_dir=None, torrent_username=None, torrent_password=None, torrent_host=None,
                   torrent_label=None, torrent_path=None, torrent_verify_cert=None,

@ -4405,7 +4527,8 @@ class ConfigSearch(Config):

            '%dm, %ds' % (minutes, seconds))
        logger.log(u'Change search PROPERS interval, next check %s' % run_at)

        sickbeard.SEARCH_UNAIRED = config.checkbox_to_value(search_unaired)
        sickbeard.SEARCH_UNAIRED = bool(config.checkbox_to_value(search_unaired))
        sickbeard.UNAIRED_RECENT_SEARCH_ONLY = bool(config.checkbox_to_value(unaired_recent_search_only, value_off=1, value_on=0))

        sickbeard.ALLOW_HIGH_PRIORITY = config.checkbox_to_value(allow_high_priority)
@ -4702,14 +4825,17 @@ class ConfigProviders(Config):

            error = '\nNo provider %s specified' % error
            return json.dumps({'success': False, 'error': error})

        if name in [n.name for n in sickbeard.newznabProviderList if n.url == url]:
            tv_categories = newznab.NewznabProvider.clean_newznab_categories([n for n in sickbeard.newznabProviderList if n.name == name][0].all_cats)
        else:
            providers = dict(zip([x.get_id() for x in sickbeard.newznabProviderList], sickbeard.newznabProviderList))
            temp_provider = newznab.NewznabProvider(name, url, key)
            if None is not key and starify(key, True):
                temp_provider.key = providers[temp_provider.get_id()].key

            success, tv_categories, error = temp_provider.get_newznab_categories()
            tv_categories = newznab.NewznabProvider.clean_newznab_categories(temp_provider.all_cats)

        return json.dumps({'success': success, 'tv_categories': tv_categories, 'error': error})
        return json.dumps({'success': True, 'tv_categories': tv_categories, 'error': ''})
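A sketch of the api-hit saving above: when a provider is already configured with the same URL, its saved capability list is reused instead of re-querying the newznab server. Provider keys, category data, and the remote-call helper are invented for illustration.

# Sketch: serve categories from a local record when one exists.
saved_providers = {('ProvA', 'https://api.example/'): [{'id': '5030', 'name': 'TV/SD'},
                                                       {'id': '5040', 'name': 'TV/HD'}]}

def fetch_caps_from_server(url):
    raise NotImplementedError('stands in for a newznab ?t=caps request')

def get_categories(name, url):
    if (name, url) in saved_providers:
        return saved_providers[(name, url)]  # no network hit
    return fetch_caps_from_server(url)

print(get_categories('ProvA', 'https://api.example/'))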
    def deleteNewznabProvider(self, nnid):
@ -29,7 +29,7 @@ class SceneTests(test.SickbeardTestDBCase):

    def test_allPossibleShowNames(self):
        # common.sceneExceptions[-1] = ['Exception Test']
        my_db = db.DBConnection('cache.db')
        my_db = db.DBConnection()
        my_db.action('INSERT INTO scene_exceptions (indexer_id, show_name, season) VALUES (?,?,?)', [-1, 'Exception Test', -1])
        common.countryList['Full Country Name'] = 'FCN'

@ -84,7 +84,7 @@ class SceneExceptionTestCase(test.SickbeardTestDBCase):

    def test_sceneExceptionsResetNameCache(self):
        # clear the exceptions
        my_db = db.DBConnection('cache.db')
        my_db = db.DBConnection()
        my_db.action('DELETE FROM scene_exceptions')

        # put something in the cache