Mirror of https://github.com/SickGear/SickGear.git, synced 2024-12-02 17:33:37 +00:00

Merge branch 'feature/AddSceneFilter' into develop

Commit ccec551726: 13 changed files with 3532 additions and 1947 deletions
@@ -4,6 +4,11 @@
* Add HDME torrent provider
* Add ImmortalSeed torrent provider
* Add Xspeeds torrent provider
* Change consolidate provider filters into 'Only allow releases that are'
* Add provider filters, Only allow releases that are ...
  'scene releases (srrDB/predb listed)', 'or contain' text or regex,
  'non scene if no recent search results', 'non scene if no active search results',
  'not scene nuked', and 'nuked if no active search results'

[develop changelog]

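Note on the new 'or contain' option above: the value is either plain text or, when prefixed with 'regex:', a regular expression. A minimal sketch of how such a value could be matched against a release name (illustration only; the helper name and the prefix handling are assumptions, not the project's actual code):

```python
import re

def scene_or_contain_match(release_name, value):
    # Hypothetical helper: a value starting with 'regex:' is treated as a
    # regular expression, anything else as a case-insensitive substring.
    if value.startswith('regex:'):
        return bool(re.search(value[len('regex:'):], release_name, flags=re.I))
    return value.lower() in release_name.lower()

# A non scene release can still pass when it matches the 'or contain' value
print(scene_or_contain_match('Show.S01E01.720p.WEB.x264-GROUP', 'regex:-group$'))  # True
```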
@@ -38,8 +38,10 @@
<tr class="infoTableSeperator"><td class="infoTableHeader"><i class="icon16-sg"></i> Homepage</td><td class="infoTableCell"><a href="<%= anon_url('https://github.com/SickGear/SickGear/wiki') %>" rel="noreferrer" onclick="window.open(this.href, '_blank'); return false;">https://github.com/SickGear/SickGear/wiki</a></td></tr>
<tr><td class="infoTableHeader"><i class="icon16-github"></i> Source</td><td class="infoTableCell"><a href="<%= anon_url('https://github.com/SickGear/SickGear/') %>" rel="noreferrer" onclick="window.open(this.href, '_blank'); return false;">https://github.com/SickGear/SickGear/</a></td></tr>
<tr><td class="infoTableHeader"><i class="icon16-mirc"></i> Internet Relay Chat</td><td class="infoTableCell"><a href="irc://irc.freenode.net/#SickGear" rel="noreferrer"><i>#SickGear</i> on <i>irc.freenode.net</i></a></td></tr>
<tr class="infoTableSeperator"><td class="infoTableHeader">Powered by</td><td class="infoTableCell">Python, HTML5, jQuery, SQLite, TheTVDB, Trakt.tv, Fanart.tv, TMDb, GitHub</td></tr>
<tr><td class="infoTableHeader"> </td><td class="infoTableHeader">This project uses the TMDb API but is not endorsed or certified by TMDb.</td></tr>
<tr class="infoTableSeperator"><td class="infoTableHeader">Powered by</td><td class="infoTableCell">Python, HTML5, jQuery, SQLite, Regex, CSS, Javascript, Tornado webserver</td></tr>
<tr><td class="infoTableHeader"> </td><td class="infoTableHeader">Huge thanks to Jetbrains for PyCharm IDE, trust them with your development project</td></tr>
<tr><td class="infoTableHeader">Credits to</td><td class="infoTableHeader">Also; TheTVDB, Trakt.tv, TVMaze, Fanart.tv, IMDb, TheXem, srrDB, Predb, and GitHub</td></tr>
<tr><td class="infoTableHeader"> </td><td class="infoTableHeader">This project uses the TMDb API but is not endorsed or certified by TMDb</td></tr>
</table>
</div>

@@ -28,32 +28,20 @@
#if not $sickbeard.USE_TORRENTS
|
||||
$methods_notused.append('Torrent')
|
||||
#end if
|
||||
|
||||
#slurp
|
||||
#if $sickbeard.USE_NZBS or $sickbeard.USE_TORRENTS
|
||||
<script type="text/javascript" charset="utf-8">
|
||||
<!--
|
||||
\$(document).ready(function(){
|
||||
|
||||
#if $sickbeard.USE_NZBS
|
||||
|
||||
var show_nzb_providers = <%= 'true' if sickbeard.USE_NZBS else 'false' %>;
|
||||
|
||||
#for $cur_newznab_provider in $sickbeard.newznabProviderList:
|
||||
|
||||
\$(this).addProvider('$cur_newznab_provider.get_id()', '$cur_newznab_provider.name', '$cur_newznab_provider.url', '<%= starify(cur_newznab_provider.key) %>', '$cur_newznab_provider.cat_ids', $int($cur_newznab_provider.default), show_nzb_providers);
|
||||
|
||||
\$(this).addProvider('$cur_newznab_provider.get_id()', '$cur_newznab_provider.name', '$cur_newznab_provider.url', '<%= starify(cur_newznab_provider.key) %>', '$cur_newznab_provider.cat_ids', $int($cur_newznab_provider.default), !0);
|
||||
#end for
|
||||
|
||||
#end if
|
||||
|
||||
#if $sickbeard.USE_TORRENTS
|
||||
|
||||
#for $cur_torrent_rss_provider in $sickbeard.torrentRssProviderList:
|
||||
|
||||
\$(this).addTorrentRssProvider('$cur_torrent_rss_provider.get_id()', '$cur_torrent_rss_provider.name', '$cur_torrent_rss_provider.url', '<%= starify(cur_torrent_rss_provider.cookies) %>');
|
||||
|
||||
#end for
|
||||
|
||||
#end if
|
||||
});
|
||||
//-->
|
||||
|
@@ -63,6 +51,7 @@
##
|
||||
#set $html_selected = ' selected="selected"'
|
||||
#set $html_checked = 'checked="checked" '
|
||||
#set $backlog_only_tip = False
|
||||
<div id="config" class="search_providers">
|
||||
<div id="config-content">
|
||||
|
||||
|
@@ -91,7 +80,6 @@
<p>Allows searching recent and past releases.</p>
|
||||
<p>Check off and drag providers into the order you want them to be used.</p>
|
||||
<p>At least one provider is required, two are recommended.</p>
|
||||
|
||||
#if $methods_notused
|
||||
<blockquote style="margin:20px 0"><%= '/'.join(x for x in methods_notused) %> providers can be enabled in <a href="$sbRoot/config/search/">Search Settings</a></blockquote>
|
||||
#else
|
||||
|
@@ -115,13 +103,11 @@
<input type="checkbox" id="enable_$cur_name" class="provider_enabler" <%= html_checked if cur_provider.is_enabled() else '' %>/>
|
||||
<a class="imgLink" #if $cur_url#href="<%= anon_url(cur_url) %>" onclick="window.open(this.href,'_blank');return false;"#else#name=""#end if# rel="noreferrer"><img src="$sbRoot/images/providers/$cur_provider.image_name()" alt="$tip" title="$tip" width="16" height="16" style="vertical-align:middle" /></a>
|
||||
<span style="vertical-align:middle">$cur_provider.name$state</span>
|
||||
#if $cur_provider.is_public_access() and type($cur_provider).__name__ not in ['TorrentRssProvider']
|
||||
#if $cur_provider.is_public_access() and type($cur_provider).__name__ not in ['TorrentRssProvider']#
|
||||
<span style="font-size:10px;vertical-align:top;font-weight:normal">(PA)</span>
|
||||
#end if#
|
||||
#if $show_type
|
||||
#end if##if $show_type##slurp
|
||||
<span style="font-size:10px;vertical-align:top;font-weight:normal">($cur_provider.providerType)</span>
|
||||
#end if#
|
||||
#if not $cur_provider.supports_backlog#*#end if#
|
||||
#end if##if not $cur_provider.supports_backlog#*#set $backlog_only_tip=True##end if##slurp
|
||||
<span class="ui-icon ui-icon-arrowthick-2-n-s pull-right" style="margin-top:3px"></span>
|
||||
</li>
|
||||
#end for
|
||||
|
@@ -130,6 +116,9 @@
|
||||
<div id="provider_key">
|
||||
<span style="float:left;font-size:10px;vertical-align:top;font-weight:normal">(PA)</span><p class="note">Public access, no account required</p>
|
||||
#if $backlog_only_tip
|
||||
<h4 class="note">*</h4><p class="note">No backlog, latest releases only</p>
|
||||
#end if
|
||||
## #if $sickbeard.USE_TORRENTS
|
||||
## <h4 class="note">**</h4><p class="note">Supports <b>limited</b> backlog searches, some episodes/qualities may not be available</p>
|
||||
## #end if
|
||||
|
@@ -166,12 +155,11 @@
if $x.providerType == $GenericProvider.NZB and $sickbeard.USE_NZBS or
|
||||
$x.providerType == $GenericProvider.TORRENT and $sickbeard.USE_TORRENTS]
|
||||
#if $cur_provider.is_enabled()
|
||||
$provider_config_list_enabled.append($cur_provider)
|
||||
#set void = $provider_config_list_enabled.append($cur_provider)
|
||||
#else
|
||||
$provider_config_list.append($cur_provider)
|
||||
#set void = $provider_config_list.append($cur_provider)
|
||||
#end if
|
||||
#end for
|
||||
|
||||
#if $provider_config_list + $provider_config_list_enabled
|
||||
<select id="editAProvider" class="form-control input-sm">
|
||||
#if $provider_config_list_enabled
|
||||
|
@@ -200,6 +188,14 @@
#set $recentsearch_tip = 'match recent episodes from results of latest releases'
|
||||
#set $backlogsearch_tip = 'allow active searching for individual episode releases'
|
||||
#set $scheduled_backlog_tip = 'enable scheduled searching for backlogged episodes'
|
||||
#set $filter_title = 'Only allow releases that are'
|
||||
#set $filter_scene_only_desc = 'scene releases (srrDB/predb listed)'
|
||||
#set $filter_scene_or_contain_desc = '...or contain'
|
||||
#set $filter_scene_loose_desc = 'non scene if no recent search results'
|
||||
#set $filter_scene_loose_active_desc = 'non scene if no active search results'
|
||||
#set $filter_scene_rej_nuked_desc = 'not scene nuked'
|
||||
#set $filter_scene_nuked_active_desc = 'nuked if no active search results'
|
||||
#set $filter_tip = 'nothing selected allows everything (i.e. no filtering, default)'
|
||||
#for $cur_newznab_provider in [$cur_provider for $cur_provider in $sickbeard.newznabProviderList]
|
||||
<div class="providerDiv" id="${cur_newznab_provider.get_id()}Div">
|
||||
#set $can_recent = $hasattr($cur_newznab_provider, 'enable_recentsearch')
|
||||
|
@@ -211,13 +207,13 @@
<span class="component-desc">
|
||||
#if $can_recent
|
||||
<label for="${cur_newznab_provider.get_id()}_enable_recentsearch" style="display:inline">
|
||||
<input type="checkbox" name="${cur_newznab_provider.get_id()}_enable_recentsearch" id="${cur_newznab_provider.get_id()}_enable_recentsearch" <%= html_checked if cur_newznab_provider.enable_recentsearch else '' %>/>
|
||||
<input class="view-if" type="checkbox" name="${cur_newznab_provider.get_id()}_enable_recentsearch" id="${cur_newznab_provider.get_id()}_enable_recentsearch" <%= html_checked if cur_newznab_provider.enable_recentsearch else '' %>/>
|
||||
<p>$recentsearch_tip</p>
|
||||
</label>
|
||||
#end if
|
||||
#if $can_backlog
|
||||
<label for="${cur_newznab_provider.get_id()}_enable_backlog" style="display:inline">
|
||||
<input class="enabler" type="checkbox" name="${cur_newznab_provider.get_id()}_enable_backlog" id="${cur_newznab_provider.get_id()}_enable_backlog" <%= html_checked if cur_newznab_provider.enable_backlog else '' %>/>
|
||||
<input class="enabler view-if" type="checkbox" name="${cur_newznab_provider.get_id()}_enable_backlog" id="${cur_newznab_provider.get_id()}_enable_backlog" <%= html_checked if cur_newznab_provider.enable_backlog else '' %>/>
|
||||
<p>$backlogsearch_tip</p>
|
||||
</label>
|
||||
#end if
|
||||
|
@@ -252,10 +248,42 @@
</label>
|
||||
</div>
|
||||
#end if
|
||||
#if $hasattr($cur_newznab_provider, 'may_filter'):
|
||||
<div class="field-pair">
|
||||
<span class="component-title">Allow releases that are</span>
|
||||
<span class="component-title">$filter_title</span>
|
||||
<span class="component-desc">
|
||||
<div style="margin-bottom:10px">
|
||||
<div style="float:left;max-width:230px">
|
||||
<label for="${cur_newznab_provider.get_id()}_scene_only">
|
||||
<input type="checkbox" name="${cur_newznab_provider.get_id()}_scene_only" id="${cur_newznab_provider.get_id()}_scene_only" <%= html_checked if cur_newznab_provider.scene_only else '' %>>
|
||||
<span>$filter_scene_only_desc</span>
|
||||
</label>
|
||||
<label for="${cur_newznab_provider.get_id()}_scene_or_contain">
|
||||
$filter_scene_or_contain_desc<input style="float:right;margin-left:4px;padding:2px 4px;height:24px;width:144px" type="text" name="${cur_newznab_provider.get_id()}_scene_or_contain" placeholder="(opt: start 'regex:')" value="<%= cur_newznab_provider.scene_or_contain %>" class="form-control input-sm input150">
|
||||
</label>
|
||||
</div>
|
||||
<div style="margin-left:230px">
|
||||
<label class="show-if-${cur_newznab_provider.get_id()}_enable_recentsearch" for="${cur_newznab_provider.get_id()}_scene_loose">
|
||||
<input type="checkbox" name="${cur_newznab_provider.get_id()}_scene_loose" id="${cur_newznab_provider.get_id()}_scene_loose" <%= html_checked if cur_newznab_provider.scene_loose else '' %>>
|
||||
<span>$filter_scene_loose_desc</span>
|
||||
</label>
|
||||
<label class="show-if-${cur_newznab_provider.get_id()}_enable_backlog" for="${cur_newznab_provider.get_id()}_scene_loose_active">
|
||||
<input type="checkbox" name="${cur_newznab_provider.get_id()}_scene_loose_active" id="${cur_newznab_provider.get_id()}_scene_loose_active" <%= html_checked if cur_newznab_provider.scene_loose_active else '' %>>
|
||||
<span>$filter_scene_loose_active_desc</span>
|
||||
</label>
|
||||
</div>
|
||||
<div style="clear:both">
|
||||
<label style="float:left;min-width:230px" for="${cur_newznab_provider.get_id()}_scene_rej_nuked">
|
||||
<input type="checkbox" name="${cur_newznab_provider.get_id()}_scene_rej_nuked" id="${cur_newznab_provider.get_id()}_scene_rej_nuked" <%= html_checked if cur_newznab_provider.scene_rej_nuked else '' %>>
|
||||
<span>$filter_scene_rej_nuked_desc</span>
|
||||
</label>
|
||||
<label class="show-if-${cur_newznab_provider.get_id()}_enable_backlog" for="${cur_newznab_provider.get_id()}_scene_nuked_active">
|
||||
<input type="checkbox" name="${cur_newznab_provider.get_id()}_scene_nuked_active" id="${cur_newznab_provider.get_id()}_scene_nuked_active" <%= html_checked if cur_newznab_provider.scene_nuked_active else '' %>>
|
||||
<span>$filter_scene_nuked_active_desc</span>
|
||||
</label>
|
||||
</div>
|
||||
</div>
|
||||
#if $hasattr($cur_newznab_provider, 'may_filter'):
|
||||
<div>
|
||||
#for $cur_fval, $filter in $cur_newznab_provider.may_filter.iteritems()
|
||||
#set $cur_fname, $cur_is_default = $filter[0], $filter[1]
|
||||
#set $filter_id = '%s_filter_%s' % ($cur_newznab_provider.get_id(), $cur_fval)
|
||||
|
@@ -264,11 +292,12 @@
<span>$cur_fname</span>
|
||||
</label>
|
||||
#end for
|
||||
<span>(see site for meaning)</span>
|
||||
<p>nothing selected allows everything (no filter, default)</p>
|
||||
</span>
|
||||
<span>(see $cur_newznab_provider.name)</span>
|
||||
</div>
|
||||
#end if
|
||||
<p style="clear:both">$filter_tip</p>
|
||||
</span>
|
||||
</div>
|
||||
#if $hasattr($cur_newznab_provider, 'search_mode') and $cur_newznab_provider.supports_backlog:
|
||||
<div class="field-pair">
|
||||
<span class="component-title">Episode search mode</span>
|
||||
|
@@ -311,13 +340,13 @@
<span class="component-desc">
|
||||
#if $can_recent
|
||||
<label for="${cur_nzb_provider.get_id()}_enable_recentsearch" style="display:inline">
|
||||
<input type="checkbox" name="${cur_nzb_provider.get_id()}_enable_recentsearch" id="${cur_nzb_provider.get_id()}_enable_recentsearch" <%= html_checked if cur_nzb_provider.enable_recentsearch else '' %>/>
|
||||
<input class="view-if" type="checkbox" name="${cur_nzb_provider.get_id()}_enable_recentsearch" id="${cur_nzb_provider.get_id()}_enable_recentsearch" <%= html_checked if cur_nzb_provider.enable_recentsearch else '' %>/>
|
||||
<p>$recentsearch_tip</p>
|
||||
</label>
|
||||
#end if
|
||||
#if $can_backlog
|
||||
<label for="${cur_nzb_provider.get_id()}_enable_backlog" style="display:inline">
|
||||
<input class="enabler" type="checkbox" name="${cur_nzb_provider.get_id()}_enable_backlog" id="${cur_nzb_provider.get_id()}_enable_backlog" <%= html_checked if cur_nzb_provider.enable_backlog else '' %>/>
|
||||
<input class="enabler view-if" type="checkbox" name="${cur_nzb_provider.get_id()}_enable_backlog" id="${cur_nzb_provider.get_id()}_enable_backlog" <%= html_checked if cur_nzb_provider.enable_backlog else '' %>/>
|
||||
<p>$backlogsearch_tip</p>
|
||||
</label>
|
||||
#end if
|
||||
|
@@ -356,6 +385,43 @@
</label>
|
||||
</div>
|
||||
#end if
|
||||
<div class="field-pair">
|
||||
<span class="component-title">$filter_title</span>
|
||||
<span class="component-desc">
|
||||
<div style="margin-bottom:10px">
|
||||
<div style="float:left;max-width:230px">
|
||||
<label for="${cur_nzb_provider.get_id()}_scene_only">
|
||||
<input type="checkbox" name="${cur_nzb_provider.get_id()}_scene_only" id="${cur_nzb_provider.get_id()}_scene_only" <%= html_checked if cur_nzb_provider.scene_only else '' %>>
|
||||
<span>$filter_scene_only_desc</span>
|
||||
</label>
|
||||
<label for="${cur_nzb_provider.get_id()}_scene_or_contain">
|
||||
$filter_scene_or_contain_desc<input style="float:right;margin-left:4px;padding:2px 4px;height:24px;width:144px" type="text" name="${cur_nzb_provider.get_id()}_scene_or_contain" placeholder="(opt: start 'regex:')" value="<%= cur_nzb_provider.scene_or_contain %>" class="form-control input-sm input150">
|
||||
</label>
|
||||
</div>
|
||||
<div style="margin-left:230px">
|
||||
<label class="show-if-${cur_nzb_provider.get_id()}_enable_recentsearch" for="${cur_nzb_provider.get_id()}_scene_loose">
|
||||
<input type="checkbox" name="${cur_nzb_provider.get_id()}_scene_loose" id="${cur_nzb_provider.get_id()}_scene_loose" <%= html_checked if cur_nzb_provider.scene_loose else '' %>>
|
||||
<span>$filter_scene_loose_desc</span>
|
||||
</label>
|
||||
<label class="show-if-${cur_nzb_provider.get_id()}_enable_backlog" for="${cur_nzb_provider.get_id()}_scene_loose_active">
|
||||
<input type="checkbox" name="${cur_nzb_provider.get_id()}_scene_loose_active" id="${cur_nzb_provider.get_id()}_scene_loose_active" <%= html_checked if cur_nzb_provider.scene_loose_active else '' %>>
|
||||
<span>$filter_scene_loose_active_desc</span>
|
||||
</label>
|
||||
</div>
|
||||
<div style="clear:both">
|
||||
<label style="float:left;min-width:230px" for="${cur_nzb_provider.get_id()}_scene_rej_nuked">
|
||||
<input type="checkbox" name="${cur_nzb_provider.get_id()}_scene_rej_nuked" id="${cur_nzb_provider.get_id()}_scene_rej_nuked" <%= html_checked if cur_nzb_provider.scene_rej_nuked else '' %>>
|
||||
<span>$filter_scene_rej_nuked_desc</span>
|
||||
</label>
|
||||
<label class="show-if-${cur_nzb_provider.get_id()}_enable_backlog" for="${cur_nzb_provider.get_id()}_scene_nuked_active">
|
||||
<input type="checkbox" name="${cur_nzb_provider.get_id()}_scene_nuked_active" id="${cur_nzb_provider.get_id()}_scene_nuked_active" <%= html_checked if cur_nzb_provider.scene_nuked_active else '' %>>
|
||||
<span>$filter_scene_nuked_active_desc</span>
|
||||
</label>
|
||||
</div>
|
||||
</div>
|
||||
<p style="clear:both">$filter_tip</p>
|
||||
</span>
|
||||
</div>
|
||||
#if $hasattr($cur_nzb_provider, 'search_mode') and $cur_nzb_provider.supports_backlog:
|
||||
<div class="field-pair">
|
||||
<span class="component-title">Episode search mode</span>
|
||||
|
@@ -414,13 +480,13 @@
<span class="component-desc">
|
||||
#if $can_recent
|
||||
<label for="${cur_torrent_provider.get_id()}_enable_recentsearch" style="display:inline">
|
||||
<input type="checkbox" name="${cur_torrent_provider.get_id()}_enable_recentsearch" id="${cur_torrent_provider.get_id()}_enable_recentsearch" <%= html_checked if cur_torrent_provider.enable_recentsearch else '' %>/>
|
||||
<input class="view-if" type="checkbox" name="${cur_torrent_provider.get_id()}_enable_recentsearch" id="${cur_torrent_provider.get_id()}_enable_recentsearch" <%= html_checked if cur_torrent_provider.enable_recentsearch else '' %>/>
|
||||
<p>$recentsearch_tip</p>
|
||||
</label>
|
||||
#end if
|
||||
#if $can_backlog
|
||||
<label for="${cur_torrent_provider.get_id()}_enable_backlog" style="display:inline">
|
||||
<input class="enabler" type="checkbox" name="${cur_torrent_provider.get_id()}_enable_backlog" id="${cur_torrent_provider.get_id()}_enable_backlog" <%= html_checked if cur_torrent_provider.enable_backlog else '' %>/>
|
||||
<input class="enabler view-if" type="checkbox" name="${cur_torrent_provider.get_id()}_enable_backlog" id="${cur_torrent_provider.get_id()}_enable_backlog" <%= html_checked if cur_torrent_provider.enable_backlog else '' %>/>
|
||||
<p>$backlogsearch_tip</p>
|
||||
</label>
|
||||
#end if
|
||||
|
@@ -576,33 +642,63 @@ name = '' if not client else get_client_instance(sickbeard.TORRENT_METHOD)().nam
</label>
|
||||
</div>
|
||||
#end if
|
||||
#if $hasattr($cur_torrent_provider, 'confirmed'):
|
||||
<div class="field-pair">
|
||||
<label for="${cur_torrent_provider.get_id()}_confirmed">
|
||||
<span class="component-title">Confirmed download</span>
|
||||
<span class="component-title">$filter_title</span>
|
||||
<span class="component-desc">
|
||||
<input type="checkbox" name="${cur_torrent_provider.get_id()}_confirmed" id="${cur_torrent_provider.get_id()}_confirmed" <%= html_checked if cur_torrent_provider.confirmed else '' %>/>
|
||||
#set $confirm_label = callable(getattr(cur_torrent_provider, 'ui_string', None)) and cur_torrent_provider.ui_string(cur_torrent_provider.get_id() + '_confirm') or 'only download torrents from trusted or verified uploaders ?'
|
||||
<p>$confirm_label</p>
|
||||
</span>
|
||||
<div style="margin-bottom:10px">
|
||||
<div style="float:left;max-width:230px">
|
||||
<label for="${cur_torrent_provider.get_id()}_scene_only">
|
||||
<input type="checkbox" name="${cur_torrent_provider.get_id()}_scene_only" id="${cur_torrent_provider.get_id()}_scene_only" <%= html_checked if cur_torrent_provider.scene_only else '' %>>
|
||||
<span>$filter_scene_only_desc</span>
|
||||
</label>
|
||||
<label for="${cur_torrent_provider.get_id()}_scene_or_contain">
|
||||
$filter_scene_or_contain_desc<input style="float:right;margin-left:4px;padding:2px 4px;height:24px;width:144px" type="text" name="${cur_torrent_provider.get_id()}_scene_or_contain" placeholder="(opt: start 'regex:')" value="<%= cur_torrent_provider.scene_or_contain %>" class="form-control input-sm input150">
|
||||
</label>
|
||||
</div>
|
||||
<div style="margin-left:230px">
|
||||
<label class="show-if-${cur_torrent_provider.get_id()}_enable_recentsearch" for="${cur_torrent_provider.get_id()}_scene_loose">
|
||||
<input type="checkbox" name="${cur_torrent_provider.get_id()}_scene_loose" id="${cur_torrent_provider.get_id()}_scene_loose" <%= html_checked if cur_torrent_provider.scene_loose else '' %>>
|
||||
<span>$filter_scene_loose_desc</span>
|
||||
</label>
|
||||
#if $cur_torrent_provider.supports_backlog:
|
||||
<label class="show-if-${cur_torrent_provider.get_id()}_enable_backlog" for="${cur_torrent_provider.get_id()}_scene_loose_active">
|
||||
<input type="checkbox" name="${cur_torrent_provider.get_id()}_scene_loose_active" id="${cur_torrent_provider.get_id()}_scene_loose_active" <%= html_checked if cur_torrent_provider.scene_loose_active else '' %>>
|
||||
<span>$filter_scene_loose_active_desc</span>
|
||||
</label>
|
||||
#end if
|
||||
</div>
|
||||
<div style="clear:both">
|
||||
<label style="float:left;min-width:230px" for="${cur_torrent_provider.get_id()}_scene_rej_nuked">
|
||||
<input type="checkbox" name="${cur_torrent_provider.get_id()}_scene_rej_nuked" id="${cur_torrent_provider.get_id()}_scene_rej_nuked" <%= html_checked if cur_torrent_provider.scene_rej_nuked else '' %>>
|
||||
<span>$filter_scene_rej_nuked_desc</span>
|
||||
</label>
|
||||
#if $cur_torrent_provider.supports_backlog:
|
||||
<label class="show-if-${cur_torrent_provider.get_id()}_enable_backlog" for="${cur_torrent_provider.get_id()}_scene_nuked_active">
|
||||
<input type="checkbox" name="${cur_torrent_provider.get_id()}_scene_nuked_active" id="${cur_torrent_provider.get_id()}_scene_nuked_active" <%= html_checked if cur_torrent_provider.scene_nuked_active else '' %>>
|
||||
<span>$filter_scene_nuked_active_desc</span>
|
||||
</label>
|
||||
#end if
|
||||
</div>
|
||||
</div>
|
||||
#if $hasattr($cur_torrent_provider, 'freeleech'):
|
||||
<div>
|
||||
<label for="${cur_torrent_provider.get_id()}_freeleech" class="space-right">
|
||||
<input type="checkbox" name="${cur_torrent_provider.get_id()}_freeleech" id="${cur_torrent_provider.get_id()}_freeleech" <%= html_checked if cur_torrent_provider.freeleech else '' %>/>
|
||||
<span><b>[FreeLeech]</b> only</span>
|
||||
</label>
|
||||
</div>
|
||||
#end if
|
||||
#if $hasattr($cur_torrent_provider, 'freeleech'):
|
||||
<div class="field-pair">
|
||||
<label for="${cur_torrent_provider.get_id()}_freeleech">
|
||||
<span class="component-title">Freeleech</span>
|
||||
<span class="component-desc">
|
||||
<input type="checkbox" name="${cur_torrent_provider.get_id()}_freeleech" id="${cur_torrent_provider.get_id()}_freeleech" <%= html_checked if cur_torrent_provider.freeleech else '' %>/>
|
||||
<p>only download <b>[FreeLeech]</b> torrents</p>
|
||||
</span>
|
||||
#if $hasattr($cur_torrent_provider, 'confirmed'):
|
||||
<div>
|
||||
<label for="${cur_torrent_provider.get_id()}_confirmed">
|
||||
<input type="checkbox" name="${cur_torrent_provider.get_id()}_confirmed" id="${cur_torrent_provider.get_id()}_confirmed" <%= html_checked if cur_torrent_provider.confirmed else '' %>/>
|
||||
#set $confirm_label = callable(getattr(cur_torrent_provider, 'ui_string', None)) and cur_torrent_provider.ui_string(cur_torrent_provider.get_id() + '_confirm') or 'site trusted or from verified uploaders'
|
||||
<span>$confirm_label</span>
|
||||
</label>
|
||||
</div>
|
||||
#end if
|
||||
#if $hasattr($cur_torrent_provider, 'may_filter'):
|
||||
<div class="field-pair">
|
||||
<span class="component-title">Allow releases that are</span>
|
||||
<span class="component-desc">
|
||||
<div>
|
||||
#for $cur_fval, $filter in $cur_torrent_provider.may_filter.iteritems()
|
||||
#set $cur_fname, $cur_is_default = $filter[0], $filter[1]
|
||||
#set $filter_id = '%s_filter_%s' % ($cur_torrent_provider.get_id(), $cur_fval)
|
||||
|
@@ -611,22 +707,20 @@ name = '' if not client else get_client_instance(sickbeard.TORRENT_METHOD)().nam
<span>$cur_fname</span>
|
||||
</label>
|
||||
#end for
|
||||
<span>(see site for meaning)</span>
|
||||
<p>nothing selected allows everything (no filter, default)</p>
|
||||
</span>
|
||||
<span>(see $cur_torrent_provider.name)</span>
|
||||
</div>
|
||||
#end if
|
||||
#if $hasattr($cur_torrent_provider, 'reject_m2ts'):
|
||||
<div class="field-pair">
|
||||
<div>
|
||||
<label for="${cur_torrent_provider.get_id()}_reject_m2ts">
|
||||
<span class="component-title">Reject Blu-ray M2TS releases</span>
|
||||
<span class="component-desc">
|
||||
<input type="checkbox" name="${cur_torrent_provider.get_id()}_reject_m2ts" id="${cur_torrent_provider.get_id()}_reject_m2ts" <%= html_checked if cur_torrent_provider.reject_m2ts else '' %>/>
|
||||
<p>enable to ignore Blu-ray MPEG-2 Transport Stream container releases</p>
|
||||
</span>
|
||||
<span>not Blu-ray M2TS (MPEG-2 Transport Stream) container releases</span>
|
||||
</label>
|
||||
</div>
|
||||
#end if
|
||||
<p style="clear:both">$filter_tip</p>
|
||||
</span>
|
||||
</div>
|
||||
#if $hasattr($cur_torrent_provider, 'search_mode') and $cur_torrent_provider.supports_backlog:
|
||||
<div class="field-pair">
|
||||
<span class="component-title">Episode search mode</span>
|
||||
|
|
|
@@ -3,6 +3,7 @@ import logging
import random
import re
from requests.sessions import Session
from requests.models import Response
import js2py
from copy import deepcopy

@@ -40,7 +41,8 @@ class CloudflareScraper(Session):
resp = super(CloudflareScraper, self).request(method, url, *args, **kwargs)

# Check if Cloudflare anti-bot is on
if (503 == resp.status_code
if (isinstance(resp, type(Response())) and isinstance(resp.headers.get('Server'), basestring)
        and 503 == resp.status_code
        and re.search('(?i)cloudflare', resp.headers.get('Server'))
        and b'jschl_vc' in resp.content
        and b'jschl_answer' in resp.content):
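The added guard matters because resp.headers.get('Server') can be None, and re.search() raises TypeError when given None; checking that the header is a string (basestring in this Python 2 code) before the regex avoids that. A small standalone sketch of the same idea, using str in place of basestring (a simplified stand-in, not the library's code):

```python
import re

def is_cloudflare_challenge(status_code, server_header, body):
    # Guard the type before re.search so a missing Server header is harmless.
    return (isinstance(server_header, str)
            and 503 == status_code
            and re.search('(?i)cloudflare', server_header) is not None
            and b'jschl_vc' in body
            and b'jschl_answer' in body)

print(is_cloudflare_challenge(503, None, b''))  # False, no TypeError raised
```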
|
||||
|
|
|
@@ -23,6 +23,8 @@ Or read more below...
* Choose to delete watched episodes from a list built directly from played media at Kodi, Emby, and/or Plex (No Trakt!)
* Smart custom qualities selector system that helps achieve an optimal quality selection for automated episode search
* Choose to have episodes upgraded in quality, or keep existing archive quality, and upgrade future episodes either way
* Single out providers to target "scene releases" either exclusively, with fallbacks to non-scene, and with optional user exclusions
* Mark providers to avoid "scene nuked" releases, with optional fallback when no other choice is available
* Natively use a most powerful regex pattern matching system for superior information handling
* Select a UI style anytime; Regular, Proview I, or Proview II - independently for Episode View, and for Display Show
* Smart fanart system allows you to rate avoid/prefer. UI can be moved or toggled off/on to fully appreciate a fanart
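A rough reading of the scene filter options listed above, written as a decision helper (an interpretation of the option names only; the real logic lives in the provider search code and may differ):

```python
def allow_release(is_scene, is_nuked, had_search_results,
                  scene_only, scene_loose, scene_rej_nuked):
    if scene_rej_nuked and is_nuked:
        return False                       # 'not scene nuked'
    if not scene_only or is_scene:
        return True                        # no restriction, or a scene release
    # 'non scene if no recent/active search results': fall back when nothing was found
    return scene_loose and not had_search_results

print(allow_release(False, False, False, True, True, True))  # True, fallback applies
print(allow_release(False, False, True, True, True, True))   # False, scene only
```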
|
||||
|
|
|
@@ -1233,83 +1233,79 @@ def initialize(console_logging=True):
if GenericProvider.TORRENT == curProvider.providerType]:
|
||||
prov_id = torrent_prov.get_id()
|
||||
prov_id_uc = torrent_prov.get_id().upper()
|
||||
torrent_prov.enabled = bool(check_setting_int(CFG, prov_id_uc, prov_id, 0))
|
||||
torrent_prov.enabled = bool(check_setting_int(CFG, prov_id_uc, prov_id, False))
|
||||
|
||||
# check str with a def of list, don't add to block settings
|
||||
if getattr(torrent_prov, 'url_edit', None):
|
||||
torrent_prov.url_home = check_setting_str(CFG, prov_id_uc, prov_id + '_url_home', [])
|
||||
if hasattr(torrent_prov, 'api_key'):
|
||||
torrent_prov.api_key = check_setting_str(CFG, prov_id_uc, prov_id + '_api_key', '')
|
||||
if hasattr(torrent_prov, 'hash'):
|
||||
torrent_prov.hash = check_setting_str(CFG, prov_id_uc, prov_id + '_hash', '')
|
||||
if hasattr(torrent_prov, 'digest'):
|
||||
torrent_prov.digest = check_setting_str(CFG, prov_id_uc, prov_id + '_digest', '')
|
||||
for user_type in ['username', 'uid']:
|
||||
if hasattr(torrent_prov, user_type):
|
||||
setattr(torrent_prov, user_type,
|
||||
check_setting_str(CFG, prov_id_uc, '%s_%s' % (prov_id, user_type), ''))
|
||||
if hasattr(torrent_prov, 'password'):
|
||||
torrent_prov.password = check_setting_str(CFG, prov_id_uc, prov_id + '_password', '')
|
||||
if hasattr(torrent_prov, 'passkey'):
|
||||
torrent_prov.passkey = check_setting_str(CFG, prov_id_uc, prov_id + '_passkey', '')
|
||||
if hasattr(torrent_prov, 'confirmed'):
|
||||
torrent_prov.confirmed = bool(check_setting_int(CFG, prov_id_uc, prov_id + '_confirmed', 0))
|
||||
if hasattr(torrent_prov, 'options'):
|
||||
torrent_prov.options = check_setting_str(CFG, prov_id_uc, prov_id + '_options', '')
|
||||
if hasattr(torrent_prov, '_seed_ratio'):
|
||||
torrent_prov._seed_ratio = check_setting_str(CFG, prov_id_uc, prov_id + '_seed_ratio', '')
|
||||
if hasattr(torrent_prov, 'seed_time'):
|
||||
torrent_prov.seed_time = check_setting_int(CFG, prov_id_uc, prov_id + '_seed_time', '')
|
||||
if hasattr(torrent_prov, 'minseed'):
|
||||
torrent_prov.minseed = check_setting_int(CFG, prov_id_uc, prov_id + '_minseed', 0)
|
||||
if hasattr(torrent_prov, 'minleech'):
|
||||
torrent_prov.minleech = check_setting_int(CFG, prov_id_uc, prov_id + '_minleech', 0)
|
||||
if hasattr(torrent_prov, 'freeleech'):
|
||||
torrent_prov.freeleech = bool(check_setting_int(CFG, prov_id_uc, prov_id + '_freeleech', 0))
|
||||
if hasattr(torrent_prov, 'reject_m2ts'):
|
||||
torrent_prov.reject_m2ts = bool(check_setting_int(CFG, prov_id_uc, prov_id + '_reject_m2ts', 0))
|
||||
if hasattr(torrent_prov, 'enable_recentsearch'):
|
||||
torrent_prov.enable_recentsearch = bool(check_setting_int(CFG, prov_id_uc,
|
||||
prov_id + '_enable_recentsearch', 1)) or \
|
||||
not getattr(torrent_prov, 'supports_backlog')
|
||||
if hasattr(torrent_prov, 'enable_backlog'):
|
||||
torrent_prov.enable_backlog = bool(check_setting_int(CFG, prov_id_uc, prov_id + '_enable_backlog', 1))
|
||||
if hasattr(torrent_prov, 'enable_scheduled_backlog'):
|
||||
torrent_prov.enable_scheduled_backlog = bool(check_setting_int(
|
||||
CFG, prov_id_uc, prov_id + '_enable_scheduled_backlog', 1))
|
||||
if hasattr(torrent_prov, 'search_mode'):
|
||||
torrent_prov.search_mode = check_setting_str(CFG, prov_id_uc, prov_id + '_search_mode', 'eponly')
|
||||
if hasattr(torrent_prov, 'search_fallback'):
|
||||
torrent_prov.search_fallback = bool(check_setting_int(CFG, prov_id_uc, prov_id + '_search_fallback', 0))
|
||||
|
||||
# check int with a default of str, don't add to block settings
|
||||
attr = 'seed_time'
|
||||
if hasattr(torrent_prov, attr):
|
||||
torrent_prov.seed_time = check_setting_int(CFG, prov_id_uc, '%s_%s' % (prov_id, attr), '')
|
||||
|
||||
# custom cond, don't add to block settings
|
||||
attr = 'enable_recentsearch'
|
||||
if hasattr(torrent_prov, attr):
|
||||
torrent_prov.enable_recentsearch = bool(check_setting_int(
|
||||
CFG, prov_id_uc, '%s_%s' % (prov_id, attr), True)) or not getattr(torrent_prov, 'supports_backlog')
|
||||
|
||||
# check str with a default of list, don't add to block settings
|
||||
if hasattr(torrent_prov, 'filter'):
|
||||
torrent_prov.filter = check_setting_str(CFG, prov_id_uc, prov_id + '_filter', [])
|
||||
|
||||
for (attr, default) in [
|
||||
('enable_backlog', True), ('enable_scheduled_backlog', True),
|
||||
('api_key', ''), ('hash', ''), ('digest', ''),
|
||||
('username', ''), ('uid', ''), ('password', ''), ('passkey', ''),
|
||||
('options', ''),
|
||||
('_seed_ratio', ''), ('minseed', 0), ('minleech', 0),
|
||||
('scene_only', False), ('scene_or_contain', ''), ('scene_loose', False), ('scene_loose_active', False),
|
||||
('scene_rej_nuked', False), ('scene_nuked_active', False),
|
||||
('freeleech', False), ('confirmed', False), ('reject_m2ts', False),
|
||||
('search_mode', 'eponly'), ('search_fallback', False)
|
||||
]:
|
||||
if hasattr(torrent_prov, attr):
|
||||
attr_check = '%s_%s' % (prov_id, attr.strip('_'))
|
||||
if isinstance(default, bool):
|
||||
setattr(torrent_prov, attr, bool(check_setting_int(CFG, prov_id_uc, attr_check, default)))
|
||||
elif isinstance(default, basestring):
|
||||
setattr(torrent_prov, attr, check_setting_str(CFG, prov_id_uc, attr_check, default))
|
||||
elif isinstance(default, int):
|
||||
setattr(torrent_prov, attr, check_setting_int(CFG, prov_id_uc, attr_check, default))
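The loop above replaces the long run of per-attribute hasattr/check_setting_* calls with a single (attr, default) table that dispatches on the type of each default; bool is tested before int because bool is a subclass of int. A self-contained sketch of the same pattern using hypothetical stand-ins (read_int/read_str are not SickGear's real CFG/check_setting_* helpers):

```python
class Provider(object):
    scene_only = False
    scene_or_contain = ''
    minseed = 0

stored = {'demo_scene_only': 1, 'demo_scene_or_contain': 'regex:-grp$', 'demo_minseed': 5}

def read_int(key, default):
    return int(stored.get(key, default))

def read_str(key, default):
    return str(stored.get(key, default))

prov, prov_id = Provider(), 'demo'
for attr, default in [('scene_only', False), ('scene_or_contain', ''), ('minseed', 0)]:
    key = '%s_%s' % (prov_id, attr)
    if isinstance(default, bool):            # bool first: bool is a subclass of int
        setattr(prov, attr, bool(read_int(key, default)))
    elif isinstance(default, str):
        setattr(prov, attr, read_str(key, default))
    elif isinstance(default, int):
        setattr(prov, attr, read_int(key, default))

print('%s %s %s' % (prov.scene_only, prov.scene_or_contain, prov.minseed))  # True regex:-grp$ 5
```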
|
||||
|
||||
for nzb_prov in [curProvider for curProvider in providers.sortedProviderList()
|
||||
if GenericProvider.NZB == curProvider.providerType]:
|
||||
prov_id = nzb_prov.get_id()
|
||||
prov_id_uc = nzb_prov.get_id().upper()
|
||||
nzb_prov.enabled = bool(
|
||||
check_setting_int(CFG, prov_id_uc, prov_id, 0))
|
||||
if hasattr(nzb_prov, 'api_key'):
|
||||
nzb_prov.api_key = check_setting_str(CFG, prov_id_uc, prov_id + '_api_key', '')
|
||||
if hasattr(nzb_prov, 'username'):
|
||||
nzb_prov.username = check_setting_str(CFG, prov_id_uc, prov_id + '_username', '')
|
||||
if hasattr(nzb_prov, 'search_mode'):
|
||||
nzb_prov.search_mode = check_setting_str(CFG, prov_id_uc, prov_id + '_search_mode', 'eponly')
|
||||
if hasattr(nzb_prov, 'search_fallback'):
|
||||
nzb_prov.search_fallback = bool(check_setting_int(CFG, prov_id_uc, prov_id + '_search_fallback', 0))
|
||||
if hasattr(nzb_prov, 'enable_recentsearch'):
|
||||
nzb_prov.enable_recentsearch = bool(check_setting_int(CFG, prov_id_uc,
|
||||
prov_id + '_enable_recentsearch', 1)) or \
|
||||
not getattr(nzb_prov, 'supports_backlog')
|
||||
if hasattr(nzb_prov, 'enable_backlog'):
|
||||
nzb_prov.enable_backlog = bool(check_setting_int(CFG, prov_id_uc, prov_id + '_enable_backlog', 1))
|
||||
if hasattr(nzb_prov, 'enable_scheduled_backlog'):
|
||||
nzb_prov.enable_scheduled_backlog = bool(check_setting_int(
|
||||
CFG, prov_id_uc, prov_id + '_enable_scheduled_backlog', 1))
|
||||
nzb_prov.enabled = bool(check_setting_int(CFG, prov_id_uc, prov_id, False))
|
||||
|
||||
attr = 'enable_recentsearch'
|
||||
if hasattr(nzb_prov, attr):
|
||||
nzb_prov.enable_recentsearch = bool(check_setting_int(
|
||||
CFG, prov_id_uc, '%s_%s' % (prov_id, attr), True)) or not getattr(nzb_prov, 'supports_backlog')
|
||||
|
||||
for (attr, default) in [
|
||||
('enable_backlog', True), ('enable_scheduled_backlog', True),
|
||||
('api_key', ''), ('username', ''),
|
||||
('scene_only', False), ('scene_or_contain', ''), ('scene_loose', False), ('scene_loose_active', False),
|
||||
('scene_rej_nuked', False), ('scene_nuked_active', False),
|
||||
('search_mode', 'eponly'), ('search_fallback', False)
|
||||
]:
|
||||
if hasattr(nzb_prov, attr):
|
||||
attr_check = '%s_%s' % (prov_id, attr.strip('_'))
|
||||
if isinstance(default, bool):
|
||||
setattr(nzb_prov, attr, bool(check_setting_int(CFG, prov_id_uc, attr_check, default)))
|
||||
elif isinstance(default, basestring):
|
||||
setattr(nzb_prov, attr, check_setting_str(CFG, prov_id_uc, attr_check, default))
|
||||
elif isinstance(default, int):
|
||||
setattr(nzb_prov, attr, check_setting_int(CFG, prov_id_uc, attr_check, default))
|
||||
|
||||
if not os.path.isfile(CONFIG_FILE):
|
||||
logger.log(u'Unable to find \'%s\', all settings will be default!' % CONFIG_FILE, logger.DEBUG)
|
||||
save_config()
|
||||
elif update_config:
|
||||
update_config = True
|
||||
|
||||
if update_config:
|
||||
save_config()
|
||||
|
||||
# start up all the threads
|
||||
|
@@ -1733,10 +1729,15 @@ def save_config():
for (setting, value) in [
|
||||
('%s_%s' % (src_id, k), getattr(src, k, v) if not v else helpers.tryInt(getattr(src, k, None)))
|
||||
for (k, v) in [
|
||||
('enable_recentsearch', 1), ('enable_backlog', 1), ('enable_scheduled_backlog', 1),
|
||||
('api_key', None), ('passkey', None), ('digest', None), ('hash', None), ('username', ''), ('uid', ''),
|
||||
('minseed', 1), ('minleech', 1), ('confirmed', 1), ('freeleech', 1), ('reject_m2ts', 1),
|
||||
('enable_recentsearch', 1), ('enable_backlog', 1), ('search_mode', None), ('search_fallback', 1),
|
||||
('seed_time', None), ('enable_scheduled_backlog', 1)] if hasattr(src, k)]:
|
||||
('minseed', 1), ('minleech', 1), ('seed_time', None),
|
||||
('confirmed', 1), ('freeleech', 1), ('reject_m2ts', 1),
|
||||
('scene_only', None), ('scene_or_contain', ''), ('scene_loose', None), ('scene_loose_active', None),
|
||||
('scene_rej_nuked', None), ('scene_nuked_active', None),
|
||||
('search_mode', None), ('search_fallback', 1)
|
||||
]
|
||||
if hasattr(src, k)]:
|
||||
new_config[src_id_uc][setting] = value
|
||||
|
||||
if hasattr(src, '_seed_ratio'):
|
||||
|
@ -1753,10 +1754,17 @@ def save_config():
|
|||
for attr in [x for x in ['api_key', 'username', 'search_mode'] if hasattr(src, x)]:
|
||||
new_config[src_id_uc]['%s_%s' % (src_id, attr)] = getattr(src, attr)
|
||||
|
||||
for attr in [x for x in ['enable_recentsearch', 'enable_backlog', 'search_fallback',
|
||||
'enable_scheduled_backlog'] if hasattr(src, x)]:
|
||||
for attr in [x for x in ['enable_recentsearch', 'enable_backlog', 'enable_scheduled_backlog',
|
||||
'scene_only', 'scene_loose', 'scene_loose_active',
|
||||
'scene_rej_nuked', 'scene_nuked_active',
|
||||
'search_fallback']
|
||||
if hasattr(src, x)]:
|
||||
new_config[src_id_uc]['%s_%s' % (src_id, attr)] = helpers.tryInt(getattr(src, attr, None))
|
||||
|
||||
attr = 'scene_or_contain'
|
||||
if hasattr(src, attr):
|
||||
new_config[src_id_uc]['%s_%s' % (src_id, attr)] = getattr(src, attr, '')
|
||||
|
||||
new_config['SABnzbd'] = {}
|
||||
new_config['SABnzbd']['sab_username'] = SAB_USERNAME
|
||||
new_config['SABnzbd']['sab_password'] = helpers.encrypt(SAB_PASSWORD, ENCRYPTION_VERSION)
|
||||
|
|
|
@@ -41,7 +41,7 @@ def search_propers(proper_list=None):
if not sickbeard.DOWNLOAD_PROPERS:
|
||||
return
|
||||
|
||||
logger.log(('Checking propers from recent search', 'Beginning search for new propers')[None is proper_list])
|
||||
logger.log(('Checking Propers from recent search', 'Beginning search for new Propers')[None is proper_list])
|
||||
|
||||
age_shows, age_anime = sickbeard.BACKLOG_DAYS + 2, 14
|
||||
aired_since_shows = datetime.datetime.today() - datetime.timedelta(days=age_shows)
|
||||
|
@@ -53,7 +53,7 @@ def search_propers(proper_list=None):
if propers:
|
||||
_download_propers(propers)
|
||||
else:
|
||||
logger.log(u'No downloads or snatches found for the last %s%s days to use for a propers search' %
|
||||
logger.log('No downloads or snatches found for the last %s%s days to use for a Propers search' %
|
||||
(age_shows, ('', ' (%s for anime)' % age_anime)[helpers.has_anime()]))
|
||||
|
||||
run_at = ''
|
||||
|
@@ -63,18 +63,17 @@ def search_propers(proper_list=None):
proper_sch = sickbeard.properFinderScheduler
|
||||
if None is proper_sch.start_time:
|
||||
run_in = proper_sch.lastRun + proper_sch.cycleTime - datetime.datetime.now()
|
||||
run_at = u', next check '
|
||||
run_at = ', next check '
|
||||
if datetime.timedelta() > run_in:
|
||||
run_at += u'imminent'
|
||||
run_at += 'imminent'
|
||||
else:
|
||||
hours, remainder = divmod(run_in.seconds, 3600)
|
||||
minutes, seconds = divmod(remainder, 60)
|
||||
run_at += u'in approx. ' + ('%dh, %dm' % (hours, minutes) if 0 < hours else
|
||||
'%dm, %ds' % (minutes, seconds))
|
||||
run_at += 'in approx. ' + ('%dm, %ds' % (minutes, seconds), '%dh, %dm' % (hours, minutes))[0 < hours]
|
||||
|
||||
logger.log(u'Completed search for new propers%s' % run_at)
|
||||
logger.log('Completed search for new Propers%s' % run_at)
|
||||
else:
|
||||
logger.log(u'Completed checking propers from recent search')
|
||||
logger.log('Completed checking Propers from recent search')
|
||||
|
||||
|
||||
def get_old_proper_level(show_obj, indexer, indexerid, season, episodes, old_status, new_quality,
|
||||
|
@@ -89,9 +88,11 @@ def get_old_proper_level(show_obj, indexer, indexerid, season, episodes, old_sta
my_db = db.DBConnection()
|
||||
np = NameParser(False, showObj=show_obj)
|
||||
for episode in episodes:
|
||||
result = my_db.select('SELECT resource FROM history WHERE showid = ? AND season = ? AND episode = ? AND '
|
||||
'(' + ' OR '.join("action LIKE '%%%02d'" % x for x in SNATCHED_ANY) + ') '
|
||||
'ORDER BY date DESC LIMIT 1',
|
||||
result = my_db.select(
|
||||
'SELECT resource FROM history'
|
||||
' WHERE showid = ?'
|
||||
' AND season = ? AND episode = ? AND '
|
||||
'(%s) ORDER BY date DESC LIMIT 1' % (' OR '.join('action LIKE "%%%02d"' % x for x in SNATCHED_ANY)),
|
||||
[indexerid, season, episode])
|
||||
if not result or not isinstance(result[0]['resource'], basestring) or not result[0]['resource']:
|
||||
continue
|
||||
|
@@ -180,9 +181,10 @@ def load_webdl_types():
def _get_proper_list(aired_since_shows, recent_shows, recent_anime, proper_list=None):
|
||||
propers = {}
|
||||
|
||||
# for each provider get a list of the
|
||||
my_db = db.DBConnection()
|
||||
# for each provider get a list of arbitrary Propers
|
||||
orig_thread_name = threading.currentThread().name
|
||||
providers = [x for x in sickbeard.providers.sortedProviderList() if x.is_active()]
|
||||
providers = filter(lambda p: p.is_active(), sickbeard.providers.sortedProviderList())
|
||||
for cur_provider in providers:
|
||||
if not recent_anime and cur_provider.anime_only:
|
||||
continue
|
||||
|
@@ -192,253 +194,277 @@ def _get_proper_list(aired_since_shows, recent_shows, recent_anime, proper_list=
if not found_propers:
|
||||
continue
|
||||
else:
|
||||
threading.currentThread().name = orig_thread_name + ' :: [' + cur_provider.name + ']'
|
||||
threading.currentThread().name = '%s :: [%s]' % (orig_thread_name, cur_provider.name)
|
||||
|
||||
logger.log(u'Searching for new PROPER releases')
|
||||
logger.log('Searching for new PROPER releases')
|
||||
|
||||
try:
|
||||
found_propers = cur_provider.find_propers(search_date=aired_since_shows, shows=recent_shows,
|
||||
anime=recent_anime)
|
||||
except exceptions.AuthException as e:
|
||||
logger.log(u'Authentication error: ' + ex(e), logger.ERROR)
|
||||
logger.log('Authentication error: %s' % ex(e), logger.ERROR)
|
||||
continue
|
||||
except Exception as e:
|
||||
logger.log(u'Error while searching ' + cur_provider.name + ', skipping: ' + ex(e), logger.ERROR)
|
||||
logger.log('Error while searching %s, skipping: %s' % (cur_provider.name, ex(e)), logger.ERROR)
|
||||
logger.log(traceback.format_exc(), logger.ERROR)
|
||||
continue
|
||||
finally:
|
||||
threading.currentThread().name = orig_thread_name
|
||||
|
||||
# if they haven't been added by a different provider than add the proper to the list
|
||||
count = 0
|
||||
for x in found_propers:
|
||||
name = _generic_name(x.name)
|
||||
if name not in propers:
|
||||
# if they haven't been added by a different provider than add the Proper to the list
|
||||
for cur_proper in found_propers:
|
||||
name = _generic_name(cur_proper.name)
|
||||
if name in propers:
|
||||
continue
|
||||
|
||||
try:
|
||||
np = NameParser(False, try_scene_exceptions=True, showObj=x.parsed_show, indexer_lookup=False)
|
||||
parse_result = np.parse(x.name)
|
||||
if parse_result.series_name and parse_result.episode_numbers and \
|
||||
(parse_result.show.indexer, parse_result.show.indexerid) in recent_shows + recent_anime:
|
||||
cur_size = getattr(x, 'size', None)
|
||||
if failed_history.has_failed(x.name, cur_size, cur_provider.name):
|
||||
continue
|
||||
logger.log(u'Found new proper: ' + x.name, logger.DEBUG)
|
||||
x.show = parse_result.show.indexerid
|
||||
x.provider = cur_provider
|
||||
x.is_repack, x.properlevel = Quality.get_proper_level(parse_result.extra_info_no_name(),
|
||||
parse_result.version,
|
||||
parse_result.is_anime,
|
||||
check_is_repack=True)
|
||||
x.is_internal = parse_result.extra_info_no_name() and \
|
||||
re.search(r'\binternal\b', parse_result.extra_info_no_name(), flags=re.I)
|
||||
x.codec = _get_codec(parse_result.extra_info_no_name())
|
||||
propers[name] = x
|
||||
count += 1
|
||||
except (InvalidNameException, InvalidShowException):
|
||||
continue
|
||||
except (StandardError, Exception):
|
||||
continue
|
||||
|
||||
cur_provider.log_result('Propers', count, '%s' % cur_provider.name)
|
||||
|
||||
# take the list of unique propers and get it sorted by
|
||||
sorted_propers = sorted(propers.values(), key=operator.attrgetter('properlevel', 'date'), reverse=True)
|
||||
verified_propers = set()
|
||||
|
||||
for cur_proper in sorted_propers:
|
||||
|
||||
np = NameParser(False, try_scene_exceptions=True, showObj=cur_proper.parsed_show, indexer_lookup=False)
|
||||
try:
|
||||
parse_result = np.parse(cur_proper.name)
|
||||
except (StandardError, Exception):
|
||||
except (InvalidNameException, InvalidShowException, Exception):
|
||||
continue
|
||||
|
||||
# set the indexerid in the db to the show's indexerid
|
||||
cur_proper.indexerid = parse_result.show.indexerid
|
||||
# get the show object
|
||||
cur_proper.parsed_show = (cur_proper.parsed_show
|
||||
or helpers.findCertainShow(sickbeard.showList, parse_result.show.indexerid))
|
||||
if None is cur_proper.parsed_show:
|
||||
logger.log('Skip download; cannot find show with indexerid [%s]' % cur_proper.indexerid, logger.ERROR)
|
||||
continue
|
||||
|
||||
# set the indexer in the db to the show's indexer
|
||||
cur_proper.indexer = parse_result.show.indexer
|
||||
cur_proper.indexer = cur_proper.parsed_show.indexer
|
||||
cur_proper.indexerid = cur_proper.parsed_show.indexerid
|
||||
|
||||
# populate our Proper instance
|
||||
cur_proper.season = parse_result.season_number if None is not parse_result.season_number else 1
|
||||
cur_proper.episode = parse_result.episode_numbers[0]
|
||||
cur_proper.release_group = parse_result.release_group
|
||||
cur_proper.version = parse_result.version
|
||||
cur_proper.extra_info = parse_result.extra_info
|
||||
cur_proper.extra_info_no_name = parse_result.extra_info_no_name
|
||||
cur_proper.quality = Quality.nameQuality(cur_proper.name, parse_result.is_anime)
|
||||
cur_proper.is_anime = parse_result.is_anime
|
||||
if not (-1 != cur_proper.indexerid and parse_result.series_name and parse_result.episode_numbers
|
||||
and (cur_proper.indexer, cur_proper.indexerid) in recent_shows + recent_anime):
|
||||
continue
|
||||
|
||||
# only get anime proper if it has release group and version
|
||||
if parse_result.is_anime:
|
||||
if not cur_proper.release_group and -1 == cur_proper.version:
|
||||
logger.log(u'Proper %s doesn\'t have a release group and version, ignoring it' % cur_proper.name,
|
||||
# only get anime Proper if it has release group and version
|
||||
if parse_result.is_anime and not parse_result.release_group and -1 == parse_result.version:
|
||||
logger.log('Ignored Proper with no release group and version in name [%s]' % cur_proper.name,
|
||||
logger.DEBUG)
|
||||
continue
|
||||
|
||||
if not show_name_helpers.pass_wordlist_checks(cur_proper.name, parse=False, indexer_lookup=False):
|
||||
logger.log(u'Proper %s isn\'t a valid scene release that we want, ignoring it' % cur_proper.name,
|
||||
logger.DEBUG)
|
||||
logger.log('Ignored unwanted Proper [%s]' % cur_proper.name, logger.DEBUG)
|
||||
continue
|
||||
|
||||
re_extras = dict(re_prefix='.*', re_suffix='.*')
|
||||
result = show_name_helpers.contains_any(cur_proper.name, parse_result.show.rls_ignore_words, **re_extras)
|
||||
re_x = dict(re_prefix='.*', re_suffix='.*')
|
||||
result = show_name_helpers.contains_any(cur_proper.name, cur_proper.parsed_show.rls_ignore_words, **re_x)
|
||||
if None is not result and result:
|
||||
logger.log(u'Ignored: %s for containing ignore word' % cur_proper.name)
|
||||
logger.log('Ignored Proper containing ignore word [%s]' % cur_proper.name, logger.DEBUG)
|
||||
continue
|
||||
|
||||
result = show_name_helpers.contains_any(cur_proper.name, parse_result.show.rls_require_words, **re_extras)
|
||||
result = show_name_helpers.contains_any(cur_proper.name, cur_proper.parsed_show.rls_require_words, **re_x)
|
||||
if None is not result and not result:
|
||||
logger.log(u'Ignored: %s for not containing any required word match' % cur_proper.name)
|
||||
logger.log('Ignored Proper for not containing any required word [%s]' % cur_proper.name, logger.DEBUG)
|
||||
continue
|
||||
|
||||
# check if we actually want this proper (if it's the right quality)
|
||||
my_db = db.DBConnection()
|
||||
cur_size = getattr(cur_proper, 'size', None)
|
||||
if failed_history.has_failed(cur_proper.name, cur_size, cur_provider.name):
|
||||
continue
|
||||
|
||||
cur_proper.season = parse_result.season_number if None is not parse_result.season_number else 1
|
||||
cur_proper.episode = parse_result.episode_numbers[0]
|
||||
# check if we actually want this Proper (if it's the right quality)
|
||||
sql_results = my_db.select(
|
||||
'SELECT release_group, status, version, release_name FROM tv_episodes WHERE showid = ? AND indexer = ? ' +
|
||||
'AND season = ? AND episode = ?',
|
||||
'SELECT release_group, status, version, release_name'
|
||||
' FROM tv_episodes'
|
||||
' WHERE showid = ? AND indexer = ? AND season = ? AND episode = ?'
|
||||
' LIMIT 1',
|
||||
[cur_proper.indexerid, cur_proper.indexer, cur_proper.season, cur_proper.episode])
|
||||
if not sql_results:
|
||||
continue
|
||||
|
||||
# only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones)
|
||||
# don't take proper of the same level we already downloaded
|
||||
old_status, old_quality = Quality.splitCompositeStatus(int(sql_results[0]['status']))
|
||||
cur_proper.is_repack, cur_proper.proper_level = Quality.get_proper_level(cur_proper.extra_info_no_name(),
|
||||
cur_proper.version,
|
||||
cur_proper.is_anime,
|
||||
check_is_repack=True)
|
||||
|
||||
old_release_group = sql_results[0]['release_group']
|
||||
# only keep the Proper if we already retrieved the same quality ep (don't get better/worse ones)
|
||||
# check if we want this release: same quality as current, current has correct status
|
||||
# restrict other release group releases to proper's
|
||||
# restrict other release group releases to Proper's
|
||||
old_status, old_quality = Quality.splitCompositeStatus(int(sql_results[0]['status']))
|
||||
cur_proper.quality = Quality.nameQuality(cur_proper.name, parse_result.is_anime)
|
||||
cur_proper.is_repack, cur_proper.properlevel = Quality.get_proper_level(
|
||||
parse_result.extra_info_no_name(), parse_result.version, parse_result.is_anime, check_is_repack=True)
|
||||
cur_proper.proper_level = cur_proper.properlevel # local non global value
|
||||
old_release_group = sql_results[0]['release_group']
|
||||
same_release_group = parse_result.release_group == old_release_group
|
||||
if old_status not in SNATCHED_ANY + [DOWNLOADED, ARCHIVED] \
|
||||
or cur_proper.quality != old_quality \
|
||||
or (cur_proper.is_repack and cur_proper.release_group != old_release_group):
|
||||
or (cur_proper.is_repack and not same_release_group):
|
||||
continue
|
||||
|
||||
np = NameParser(False, try_scene_exceptions=True, showObj=parse_result.show, indexer_lookup=False)
|
||||
np = NameParser(False, try_scene_exceptions=True, showObj=cur_proper.parsed_show, indexer_lookup=False)
|
||||
try:
|
||||
extra_info = np.parse(sql_results[0]['release_name']).extra_info_no_name()
|
||||
except (StandardError, Exception):
|
||||
extra_info = None
|
||||
|
||||
# don't take Proper of the same level we already downloaded
|
||||
old_proper_level, old_is_internal, old_codec, old_extra_no_name, old_name = \
|
||||
get_old_proper_level(parse_result.show, cur_proper.indexer, cur_proper.indexerid, cur_proper.season,
|
||||
parse_result.episode_numbers, old_status, cur_proper.quality, extra_info,
|
||||
cur_proper.version, cur_proper.is_anime)
|
||||
|
||||
old_name = (old_name, sql_results[0]['release_name'])[old_name in ('', None)]
|
||||
get_old_proper_level(cur_proper.parsed_show, cur_proper.indexer, cur_proper.indexerid,
|
||||
cur_proper.season, parse_result.episode_numbers,
|
||||
old_status, cur_proper.quality, extra_info,
|
||||
parse_result.version, parse_result.is_anime)
|
||||
cur_proper.codec = _get_codec(parse_result.extra_info_no_name())
|
||||
if cur_proper.proper_level < old_proper_level:
|
||||
continue
|
||||
elif cur_proper.proper_level == old_proper_level:
if '264' == cur_proper.codec and 'xvid' == old_codec:
pass
elif old_is_internal and not cur_proper.is_internal:
pass
else:
continue

log_same_grp = 'Skipping proper from release group: [%s], does not match existing release group: [%s] for [%s]'\
% (cur_proper.release_group, old_release_group, cur_proper.name)
cur_proper.is_internal = (parse_result.extra_info_no_name() and
re.search(r'\binternal\b', parse_result.extra_info_no_name(), flags=re.I))
if cur_proper.proper_level == old_proper_level:
if (('264' == cur_proper.codec and 'xvid' == old_codec)
or (old_is_internal and not cur_proper.is_internal)):
pass
continue

is_web = (old_quality in (Quality.HDWEBDL, Quality.FULLHDWEBDL, Quality.UHD4KWEB) or
(old_quality == Quality.SDTV and re.search(r'\Wweb.?(dl|rip|.[hx]26[45])\W',
str(sql_results[0]['release_name']), re.I)))

if is_web:
old_name = (old_name, sql_results[0]['release_name'])[old_name in ('', None)]
old_webdl_type = get_webdl_type(old_extra_no_name, old_name)
new_webdl_type = get_webdl_type(cur_proper.extra_info_no_name(), cur_proper.name)
new_webdl_type = get_webdl_type(parse_result.extra_info_no_name(), cur_proper.name)
if old_webdl_type != new_webdl_type:
logger.log('Skipping proper webdl source: [%s], does not match existing webdl source: [%s] for [%s]'
logger.log('Ignored Proper webdl source [%s], does not match existing webdl source [%s] for [%s]'
% (old_webdl_type, new_webdl_type, cur_proper.name), logger.DEBUG)
continue

# for webldls, prevent propers from different groups
if sickbeard.PROPERS_WEBDL_ONEGRP and is_web and cur_proper.release_group != old_release_group:
# for webdls, prevent Propers from different groups
log_same_grp = 'Ignored Proper from release group [%s] does not match existing group [%s] for [%s]' \
% (parse_result.release_group, old_release_group, cur_proper.name)
if sickbeard.PROPERS_WEBDL_ONEGRP and is_web and not same_release_group:
logger.log(log_same_grp, logger.DEBUG)
continue

# check if we actually want this proper (if it's the right release group and a higher version)
# check if we actually want this Proper (if it's the right release group and a higher version)
if parse_result.is_anime:

old_version = int(sql_results[0]['version'])
if -1 < old_version < cur_proper.version:
logger.log(u'Found new anime v%s to replace existing v%s' % (cur_proper.version, old_version))
else:
if not (-1 < old_version < parse_result.version):
continue

if cur_proper.release_group != old_release_group:
if not same_release_group:
logger.log(log_same_grp, logger.DEBUG)
continue

# if the show is in our list and there hasn't been a proper already added for that particular episode
# then add it to our list of propers
if cur_proper.indexerid != -1:
if (cur_proper.indexerid, cur_proper.indexer, cur_proper.season, cur_proper.episode) not in map(
operator.attrgetter('indexerid', 'indexer', 'season', 'episode'), verified_propers):
logger.log(u'Found a proper that may be useful: %s' % cur_proper.name)
verified_propers.add(cur_proper)
found_msg = 'Found anime Proper v%s to replace v%s' % (parse_result.version, old_version)
else:
rp = set()
for vp in verified_propers:
if vp.indexer == cur_proper.indexer and vp.indexerid == cur_proper.indexerid and \
vp.season == cur_proper.season and vp.episode == cur_proper.episode and \
vp.proper_level < cur_proper.proper_level:
rp.add(vp)
if rp:
verified_propers = verified_propers - rp
logger.log(u'Found a proper that may be useful: %s' % cur_proper.name)
verified_propers.add(cur_proper)

return list(verified_propers)

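The rp-set pruning above keeps only the strongest candidate per episode before a new Proper is admitted. As an illustrative aside (a hypothetical stand-alone helper, not code from this commit), the same idea keyed on the episode identity tuple:

import operator

def dedupe_propers(propers):
    # keep the highest proper_level candidate per (indexer, indexerid, season, episode)
    _epid = operator.attrgetter('indexer', 'indexerid', 'season', 'episode')
    best = {}
    for cur in propers:
        key = _epid(cur)
        if key not in best or cur.proper_level > best[key].proper_level:
            best[key] = cur
    return list(best.values())
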
def _download_propers(proper_list):

for cur_proper in proper_list:

history_limit = datetime.datetime.today() - datetime.timedelta(days=30)
found_msg = 'Found Proper [%s]' % cur_proper.name

# make sure the episode has been downloaded before
my_db = db.DBConnection()
history_limit = datetime.datetime.today() - datetime.timedelta(days=30)
history_results = my_db.select(
'SELECT resource FROM history ' +
'WHERE showid = ? AND season = ? AND episode = ? AND quality = ? AND date >= ? ' +
'AND (' + ' OR '.join("action LIKE '%%%02d'" % x for x in SNATCHED_ANY + [DOWNLOADED, ARCHIVED]) + ')',
[cur_proper.indexerid, cur_proper.season, cur_proper.episode, cur_proper.quality,
'SELECT resource FROM history'
' WHERE showid = ?'
' AND season = ? AND episode = ? AND quality = ? AND date >= ?'
' AND (%s)' % ' OR '.join('action LIKE "%%%02d"' % x for x in SNATCHED_ANY + [DOWNLOADED, ARCHIVED]),
[cur_proper.indexerid,
cur_proper.season, cur_proper.episode, cur_proper.quality,
history_limit.strftime(history.dateFormat)])

# if we didn't download this episode in the first place we don't know what quality to use for the proper = skip
if 0 == len(history_results):
logger.log(u'Skipping download because cannot find an original history entry for proper ' + cur_proper.name)
# skip if the episode has never downloaded, because a previous quality is required to match the Proper
if not len(history_results):
logger.log('Ignored Proper cannot find a recent history item for [%s]' % cur_proper.name, logger.DEBUG)
continue

else:

# get the show object
show_obj = helpers.findCertainShow(sickbeard.showList, cur_proper.indexerid)
if None is show_obj:
logger.log(u'Unable to find the show with indexerid ' + str(
cur_proper.indexerid) + ' so unable to download the proper', logger.ERROR)
continue

# make sure that none of the existing history downloads are the same proper we're trying to download
clean_proper_name = _generic_name(helpers.remove_non_release_groups(cur_proper.name, show_obj.is_anime))
# make sure that none of the existing history downloads are the same Proper as the download candidate
clean_proper_name = _generic_name(helpers.remove_non_release_groups(
cur_proper.name, cur_proper.parsed_show.is_anime))
is_same = False
for result in history_results:
for hitem in history_results:
# if the result exists in history already we need to skip it
if clean_proper_name == _generic_name(helpers.remove_non_release_groups(
ek.ek(os.path.basename, result['resource']))):
ek.ek(os.path.basename, hitem['resource']))):
is_same = True
break
if is_same:
logger.log(u'This proper is already in history, skipping it', logger.DEBUG)
logger.log('Ignored Proper already in history [%s]' % cur_proper.name)
continue

ep_obj = show_obj.getEpisode(cur_proper.season, cur_proper.episode)
logger.log(found_msg, logger.DEBUG)

# finish populating the Proper instance
# cur_proper.show = cur_proper.parsed_show.indexerid
cur_proper.provider = cur_provider
cur_proper.extra_info = parse_result.extra_info
cur_proper.extra_info_no_name = parse_result.extra_info_no_name
cur_proper.release_group = parse_result.release_group

cur_proper.is_anime = parse_result.is_anime
cur_proper.version = parse_result.version

propers[name] = cur_proper

cur_provider.log_result('Propers', len(propers), '%s' % cur_provider.name)

return propers.values()

def _download_propers(proper_list):
verified_propers = True
consumed_proper = []
downloaded_epid = set()

_epid = operator.attrgetter('indexerid', 'indexer', 'season', 'episode')
while verified_propers:
verified_propers = set()

# get verified list; sort the list of unique Propers for highest proper_level, newest first
for cur_proper in sorted(
filter(lambda p: p not in consumed_proper,
# allows Proper to fail or be rejected and another to be tried (with a different name)
filter(lambda p: _epid(p) not in downloaded_epid, proper_list)),
key=operator.attrgetter('properlevel', 'date'), reverse=True):

epid = _epid(cur_proper)

# if the show is in our list and there hasn't been a Proper already added for that particular episode
# then add it to our list of Propers
if epid not in map(_epid, verified_propers):
logger.log('Proper may be useful [%s]' % cur_proper.name)
verified_propers.add(cur_proper)
else:
# use Proper with the highest level
remove_propers = set()
map(lambda vp: remove_propers.add(vp),
filter(lambda p: (epid == _epid(p) and cur_proper.proper_level > p.proper_level), verified_propers))

if remove_propers:
verified_propers -= remove_propers
logger.log('A more useful Proper [%s]' % cur_proper.name)
verified_propers.add(cur_proper)

for cur_proper in list(verified_propers):
consumed_proper += [cur_proper]

# scene release checking
scene_only = getattr(cur_proper.provider, 'scene_only', False)
scene_rej_nuked = getattr(cur_proper.provider, 'scene_rej_nuked', False)
if any([scene_only, scene_rej_nuked]) and not cur_proper.parsed_show.is_anime:
scene_or_contain = getattr(cur_proper.provider, 'scene_or_contain', '')
scene_contains = False
if scene_only and scene_or_contain:
re_extras = dict(re_prefix='.*', re_suffix='.*')
r = show_name_helpers.contains_any(cur_proper.name, scene_or_contain, **re_extras)
if None is not r and r:
scene_contains = True

if scene_contains and not scene_rej_nuked:
reject = False
else:
reject, url = search.can_reject(cur_proper.name)
if reject:
if isinstance(reject, basestring):
if scene_rej_nuked:
logger.log('Rejecting nuked release. Nuke reason [%s] source [%s]' % (reject, url),
logger.DEBUG)
else:
logger.log('Considering nuked release. Nuke reason [%s] source [%s]' % (reject, url),
logger.DEBUG)
reject = False
elif scene_contains:
reject = False
else:
logger.log('Rejecting as not scene release listed at any [%s]' % url, logger.DEBUG)

if reject:
continue

# make the result object
ep_obj = cur_proper.parsed_show.getEpisode(cur_proper.season, cur_proper.episode)
result = cur_proper.provider.get_result([ep_obj], cur_proper.url)
if None is result:
continue

@@ -450,7 +476,8 @@ def _download_propers(proper_list):
result.puid = cur_proper.puid

# snatch it
search.snatch_episode(result, SNATCHED_PROPER)
if search.snatch_episode(result, SNATCHED_PROPER):
downloaded_epid.add(_epid(cur_proper))


def get_needed_qualites(needed=None):

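The rewritten _download_propers above loops until nothing is left to verify: candidates already consumed, or whose episode id has been snatched, are filtered out, the remainder is sorted by proper level then date, one winner per episode is kept, and a failed snatch lets the next-best candidate be tried on the following pass. A compact sketch of that select-and-retry shape, with hypothetical names and a snatch callable assumed to return success:

import operator

def pick_and_snatch(candidates, snatch):
    _epid = operator.attrgetter('indexerid', 'indexer', 'season', 'episode')
    consumed, downloaded = [], set()
    while True:
        pool = [c for c in candidates if c not in consumed and _epid(c) not in downloaded]
        if not pool:
            break
        pool.sort(key=operator.attrgetter('proper_level', 'date'), reverse=True)
        best_per_ep = {}
        for cur in pool:
            best_per_ep.setdefault(_epid(cur), cur)  # pool is sorted, so the first seen wins
        for cur in best_per_ep.values():
            consumed.append(cur)
            if snatch(cur):
                downloaded.add(_epid(cur))
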
@@ -242,6 +242,13 @@ class GenericProvider(object):
self.fail_times = {1: (0, 15), 2: (0, 30), 3: (1, 0), 4: (2, 0), 5: (3, 0), 6: (6, 0), 7: (12, 0), 8: (24, 0)}
self._load_fail_values()

self.scene_only = False
self.scene_or_contain = ''
self.scene_loose = False
self.scene_loose_active = False
self.scene_rej_nuked = False
self.scene_nuked_active = False

def _load_fail_values(self):
if hasattr(sickbeard, 'DATA_DIR'):
my_db = db.DBConnection('cache.db')

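Every provider now starts with the six scene-filter attributes switched off, and the search code reads them with getattr defaults, so provider objects created before this change still behave. As an illustration only (a hypothetical subclass, not part of this commit), a source opting in would flip the flags after calling the base initialiser:

class ExampleProvider(GenericProvider):  # hypothetical, for illustration
    def __init__(self, *args, **kwargs):
        super(ExampleProvider, self).__init__(*args, **kwargs)
        self.scene_only = True           # only allow releases listed at srrDB/predb
        self.scene_or_contain = 'remux'  # ...or titles matching this text/regex
        self.scene_rej_nuked = True      # reject releases predb reports as nuked
        self.scene_nuked_active = True   # but accept a nuked one when no active search result exists
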
@@ -123,11 +123,12 @@ class NewznabProvider(generic.NZBProvider):
self._caps_last_updated = datetime.datetime.fromordinal(1)
self.cache = NewznabCache(self)
# filters
if super(NewznabProvider, self).get_id() in ('nzbs_org',):
self.filter = []
if 'nzbs_org' == super(NewznabProvider, self).get_id():
self.may_filter = OrderedDict([
('so', ('scene only', False)), ('snn', ('scene not nuked', False))])
# deprecated; kept here as bookmark for new haspretime:0|1 + nuked:0|1 can be used here instead
# if super(NewznabProvider, self).get_id() in ('nzbs_org',):
# self.filter = []
# if 'nzbs_org' == super(NewznabProvider, self).get_id():
# self.may_filter = OrderedDict([
# ('so', ('scene only', False)), ('snn', ('scene not nuked', False))])

@property
def cat_ids(self):

@@ -727,9 +728,10 @@ class NewznabProvider(generic.NZBProvider):
request_params['t'] = 'search'
request_params.update(params)

if hasattr(self, 'filter'):
if 'nzbs_org' == self.get_id():
request_params['rls'] = ((0, 1)['so' in self.filter], 2)['snn' in self.filter]
# deprecated; kept here as bookmark for new haspretime:0|1 + nuked:0|1 can be used here instead
# if hasattr(self, 'filter'):
# if 'nzbs_org' == self.get_id():
# request_params['rls'] = ((0, 1)['so' in self.filter], 2)['snn' in self.filter]

# workaround a strange glitch
if sum(ord(i) for i in self.get_id()) in [383] and 5 == 14 - request_params['maxage']:

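The retired nzbs_org-specific block above folded its two checkboxes into a single 'rls' query value through tuple indexing. For reference, the mapping that expression produces, shown as a runnable check derived directly from the expression:

def rls_param(active_filters):
    # reproduce the retired mapping of filter keys to the 'rls' request value
    return ((0, 1)['so' in active_filters], 2)['snn' in active_filters]

assert 0 == rls_param([])             # no restriction
assert 1 == rls_param(['so'])         # scene only
assert 2 == rls_param(['snn'])        # scene, not nuked
assert 2 == rls_param(['so', 'snn'])  # 'snn' takes precedence
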
@@ -116,7 +116,7 @@ class SceneHDProvider(generic.TorrentProvider):

@staticmethod
def ui_string(key):
return 'scenehd_confirm' == key and 'skip releases marked as bad/nuked' or ''
return 'scenehd_confirm' == key and 'not marked as bad/nuked' or ''


provider = SceneHDProvider()

@@ -206,20 +206,35 @@ def pass_show_wordlist_checks(name, show):
return True


def pick_best_result(results, show, quality_list=None):
def pick_best_result(results, show, quality_list=None, filter_rls=False):
logger.log(u'Picking the best result out of %s' % [x.name for x in results], logger.DEBUG)

# find the best result for the current episode
best_result = None
for cur_result in results:
best_fallback_result = None
scene_only = scene_or_contain = scene_loose = scene_loose_active = scene_rej_nuked = scene_nuked_active = False
if filter_rls:
try:
provider = getattr(results[0], 'provider', None)
scene_only = getattr(provider, 'scene_only', False)
scene_or_contain = getattr(provider, 'scene_or_contain', '')
recent_task = 'RECENT' in filter_rls
scene_loose = getattr(provider, 'scene_loose', False) and recent_task
scene_loose_active = getattr(provider, 'scene_loose_active', False) and not recent_task
scene_rej_nuked = getattr(provider, 'scene_rej_nuked', False)
scene_nuked_active = getattr(provider, 'scene_nuked_active', False) and not recent_task
except (StandardError, Exception):
filter_rls = False

logger.log(u'Quality is %s for [%s]' % (Quality.qualityStrings[cur_result.quality], cur_result.name))
addendum = ''
for cur_result in results:

if show.is_anime and not show.release_groups.is_valid(cur_result):
continue

if quality_list and cur_result.quality not in quality_list:
logger.log(u'Rejecting unwanted quality [%s]' % cur_result.name, logger.DEBUG)
logger.log(u'Rejecting unwanted quality %s for [%s]' % (
Quality.qualityStrings[cur_result.quality], cur_result.name), logger.DEBUG)
continue

if not pass_show_wordlist_checks(cur_result.name, show):

@@ -231,6 +246,60 @@ def pick_best_result(results, show, quality_list=None):
logger.log(u'Rejecting previously failed [%s]' % cur_result.name)
continue

if filter_rls and any([scene_only, scene_loose, scene_loose_active, scene_rej_nuked, scene_nuked_active]):
if show.is_anime:
addendum = u'anime (skipping scene/nuke filter) '
else:
scene_contains = False
if scene_only and scene_or_contain:
re_extras = dict(re_prefix='.*', re_suffix='.*')
r = show_name_helpers.contains_any(cur_result.name, scene_or_contain, **re_extras)
if None is not r and r:
scene_contains = True

if scene_contains and not scene_rej_nuked:
logger.log(u'Considering title match to \'or contain\' [%s]' % cur_result.name, logger.DEBUG)
reject = False
else:
reject, url = can_reject(cur_result.name)
if reject:
if isinstance(reject, basestring):
if scene_rej_nuked and not scene_nuked_active:
logger.log(u'Rejecting nuked release. Nuke reason [%s] source [%s]' % (reject, url),
logger.DEBUG)
elif scene_nuked_active:
best_fallback_result = best_candidate(best_fallback_result, cur_result)
else:
logger.log(u'Considering nuked release. Nuke reason [%s] source [%s]' % (reject, url),
logger.DEBUG)
reject = False
elif scene_contains or any([scene_loose, scene_loose_active]):
best_fallback_result = best_candidate(best_fallback_result, cur_result)
else:
logger.log(u'Rejecting as not scene release listed at any [%s]' % url, logger.DEBUG)

if reject:
continue

best_result = best_candidate(best_result, cur_result)

if best_result and scene_only and not show.is_anime:
addendum = u'scene release filtered '
elif not best_result and best_fallback_result:
addendum = u'non scene release filtered '
best_result = best_fallback_result

if best_result:
logger.log(u'Picked as the best %s[%s]' % (addendum, best_result.name), logger.DEBUG)
else:
logger.log(u'No result picked.', logger.DEBUG)

return best_result


def best_candidate(best_result, cur_result):
logger.log(u'Quality is %s for [%s]' % (Quality.qualityStrings[cur_result.quality], cur_result.name))

if not best_result or best_result.quality < cur_result.quality != Quality.UNKNOWN:
best_result = cur_result

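In the new pick_best_result path, a result that fails the scene check but is permitted by a loose or nuked-active setting is parked as a fallback and only promoted when no scene-listed result survives. A stripped-down sketch of that precedence, with a hypothetical is_scene_listed callable standing in for can_reject and a plain quality comparison standing in for best_candidate:

def choose(results, is_scene_listed):
    # prefer the best scene-listed result; otherwise fall back to the best non-scene one
    def better(a, b):
        return b if a is None or b.quality > a.quality else a
    best = fallback = None
    for cur in results:
        if is_scene_listed(cur.name):
            best = better(best, cur)
        else:
            fallback = better(fallback, cur)
    return best or fallback
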
@@ -245,11 +314,6 @@ def pick_best_result(results, show, quality_list=None):
elif 'internal' in best_result.name.lower() and 'internal' not in cur_result.name.lower():
best_result = cur_result

if best_result:
logger.log(u'Picked as the best [%s]' % best_result.name, logger.DEBUG)
else:
logger.log(u'No result picked.', logger.DEBUG)

return best_result

@@ -449,7 +513,7 @@ def search_for_needed_episodes(episodes):
continue

# find the best result for the current episode
best_result = pick_best_result(cur_found_results[cur_ep], cur_ep.show)
best_result = pick_best_result(cur_found_results[cur_ep], cur_ep.show, filter_rls=orig_thread_name)

# if all results were rejected move on to the next episode
if not best_result:

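filter_rls is handed the originating thread name here, so pick_best_result can tell a recent search from a backlog or manual search by substring, which is what gates scene_loose (recent searches) against scene_loose_active and scene_nuked_active (non-recent searches). A small sketch of that convention; the example thread names are assumptions, only the 'RECENT' test is taken from the code above:

def is_recent_task(filter_rls):
    # filter_rls is falsy (no filtering) or the originating search thread name
    return bool(filter_rls) and 'RECENT' in filter_rls

print(is_recent_task('RECENT-SEARCH'))  # True  -> scene_loose applies
print(is_recent_task('BACKLOG'))        # False -> scene_loose_active / scene_nuked_active apply
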
@@ -488,6 +552,52 @@ def search_for_needed_episodes(episodes):
return found_results.values()


def can_reject(release_name):
"""
Check if a release name should be rejected at external services.
If any site reports result as a valid scene release, then return None, None.
If predb reports result as nuked, then return nuke reason and url attempted.
If fail to find result at all services, return reject and url details for each site.

:param release_name: Release title
:type release_name: String
:return: None, None if release has no issue otherwise True/Nuke reason, URLs that rejected
:rtype: Tuple (None, None or True/String, String)
"""
rej_urls = []
srrdb_url = 'https://www.srrdb.com/api/search/r:%s/order:date-desc' % re.sub('\]\[', '', release_name)
resp = helpers.getURL(srrdb_url, json=True)
if not resp:
srrdb_rej = True
rej_urls += ['Failed contact \'%s\'' % srrdb_url]
else:
srrdb_rej = (not len(resp.get('results', []))
or release_name.lower() != resp.get('results', [{}])[0].get('release', '').lower())
rej_urls += ([], ['\'%s\'' % srrdb_url])[srrdb_rej]

sane_name = helpers.full_sanitizeSceneName(release_name)
predb_url = 'https://predb.ovh/api/v1/?q=@name "%s"' % sane_name
resp = helpers.getURL(predb_url, json=True)
predb_rej = True
if not resp:
rej_urls += ['Failed contact \'%s\'' % predb_url]
elif 'success' == resp.get('status', '').lower():
rows = resp and (resp.get('data') or {}).get('rows') or []
for data in rows:
if sane_name == helpers.full_sanitizeSceneName((data.get('name', '') or '').strip()):
nuke_type = (data.get('nuke') or {}).get('type')
if not nuke_type:
predb_rej = not helpers.tryInt(data.get('preAt'))
else:
predb_rej = 'un' not in nuke_type and data.get('nuke', {}).get('reason', 'Reason not set')
break
rej_urls += ([], ['\'%s\'' % predb_url])[bool(predb_rej)]

pred = any([not srrdb_rej, not predb_rej])

return pred and (None, None) or (predb_rej or True, ', '.join(rej_urls))


def search_providers(show, episodes, manual_search=False, torrent_only=False, try_other_searches=False, old_status=None, scheduled=False):
found_results = {}
final_results = []

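Per its docstring, can_reject returns (None, None) when either service confirms the release, a nuke-reason string plus the consulted URLs when predb reports a nuke, or True plus the URLs when neither service knows the name. A usage sketch (the release name is a placeholder):

reject, url = can_reject('Some.Show.S01E02.720p.HDTV.x264-GROUP')
if not reject:
    pass                                 # listed as a scene release somewhere: keep it
elif isinstance(reject, basestring):     # Python 2 codebase; substitute str on Python 3
    print('nuked: %s (checked %s)' % (reject, url))
else:
    print('not found as a scene release (checked %s)' % url)
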
@@ -742,7 +852,8 @@ def search_providers(show, episodes, manual_search=False, torrent_only=False, tr
if 0 == len(found_results[provider_id][cur_ep]):
continue

best_result = pick_best_result(found_results[provider_id][cur_ep], show, quality_list)
best_result = pick_best_result(found_results[provider_id][cur_ep], show, quality_list,
filter_rls=orig_thread_name)

# if all results were rejected move on to the next episode
if not best_result:

@@ -6359,19 +6359,21 @@ class ConfigProviders(Config):
# a 0 in the key spot indicates that no key is needed
nzb_src.needs_auth = '0' != cur_key

attr = 'search_mode'
if cur_id + '_' + attr in kwargs:
setattr(nzb_src, attr, str(kwargs.get(cur_id + '_' + attr)).strip())

attr = 'filter'
if hasattr(nzb_src, attr):
setattr(nzb_src, attr,
[k for k in nzb_src.may_filter.keys()
if config.checkbox_to_value(kwargs.get('%s_filter_%s' % (cur_id, k)))])

for attr in ['search_fallback', 'enable_recentsearch', 'enable_backlog', 'enable_scheduled_backlog']:
for attr in ['search_fallback', 'enable_recentsearch', 'enable_backlog', 'enable_scheduled_backlog',
'scene_only', 'scene_loose', 'scene_loose_active',
'scene_rej_nuked', 'scene_nuked_active',]:
setattr(nzb_src, attr, config.checkbox_to_value(kwargs.get(cur_id + '_' + attr)))

for attr in ['scene_or_contain', 'search_mode']:
attr_check = '%s_%s' % (cur_id, attr)
if attr_check in kwargs:
setattr(nzb_src, attr, str(kwargs.get(attr_check) or '').strip())
else:
sickbeard.newznabProviderList.append(new_provider)

@@ -6404,10 +6406,21 @@ class ConfigProviders(Config):

# if it already exists then update it
if cur_id in torrent_rss_sources:
torrent_rss_sources[cur_id].name = cur_name
torrent_rss_sources[cur_id].url = cur_url
torrss_src = torrent_rss_sources[cur_id]

torrss_src.name = cur_name
torrss_src.url = cur_url
if cur_cookies:
torrent_rss_sources[cur_id].cookies = cur_cookies
torrss_src.cookies = cur_cookies

for attr in ['scene_only', 'scene_loose', 'scene_loose_active',
'scene_rej_nuked', 'scene_nuked_active']:
setattr(torrss_src, attr, config.checkbox_to_value(kwargs.get(cur_id + '_' + attr)))

for attr in ['scene_or_contain']:
attr_check = '%s_%s' % (cur_id, attr)
if attr_check in kwargs:
setattr(torrss_src, attr, str(kwargs.get(attr_check) or '').strip())
else:
sickbeard.torrentRssProviderList.append(new_provider)

@@ -6472,25 +6485,27 @@ class ConfigProviders(Config):
for attr in [x for x in ['minseed', 'minleech'] if hasattr(torrent_src, x)]:
setattr(torrent_src, attr, config.to_int(str(kwargs.get(src_id_prefix + attr)).strip()))

for attr in [x for x in ['confirmed', 'freeleech', 'reject_m2ts', 'enable_recentsearch',
'enable_backlog', 'search_fallback', 'enable_scheduled_backlog']
if hasattr(torrent_src, x) and src_id_prefix + attr in kwargs]:
setattr(torrent_src, attr, config.checkbox_to_value(kwargs.get(src_id_prefix + attr)))

attr = 'seed_time'
if hasattr(torrent_src, attr) and src_id_prefix + attr in kwargs:
setattr(torrent_src, attr, config.to_int(str(kwargs.get(src_id_prefix + attr)).strip()))

attr = 'search_mode'
if hasattr(torrent_src, attr):
setattr(torrent_src, attr, str(kwargs.get(src_id_prefix + attr, '')).strip() or 'eponly')

attr = 'filter'
if hasattr(torrent_src, attr):
setattr(torrent_src, attr,
[k for k in torrent_src.may_filter.keys()
if config.checkbox_to_value(kwargs.get('%sfilter_%s' % (src_id_prefix, k)))])

for attr in [x for x in ['confirmed', 'freeleech', 'reject_m2ts', 'enable_recentsearch',
'enable_backlog', 'search_fallback', 'enable_scheduled_backlog',
'scene_only', 'scene_loose', 'scene_loose_active',
'scene_rej_nuked', 'scene_nuked_active']
if hasattr(torrent_src, x) and src_id_prefix + x in kwargs]:
setattr(torrent_src, attr, config.checkbox_to_value(kwargs.get(src_id_prefix + attr)))

for (attr, default) in [('scene_or_contain', ''), ('search_mode', 'eponly')]:
if hasattr(torrent_src, attr):
setattr(torrent_src, attr, str(kwargs.get(src_id_prefix + attr) or default).strip())

# update nzb source settings
for nzb_src in [src for src in sickbeard.providers.sortedProviderList() if
sickbeard.GenericProvider.NZB == src.providerType]:

@@ -6506,18 +6521,21 @@ class ConfigProviders(Config):
if hasattr(nzb_src, attr):
setattr(nzb_src, attr, str(kwargs.get(src_id_prefix + attr, '')).strip() or None)

attr = 'search_mode'
if hasattr(nzb_src, attr):
setattr(nzb_src, attr, str(kwargs.get(src_id_prefix + attr, '')).strip() or 'eponly')

attr = 'enable_recentsearch'
if hasattr(nzb_src, attr):
setattr(nzb_src, attr, config.checkbox_to_value(kwargs.get(src_id_prefix + attr)) or
not getattr(nzb_src, 'supports_backlog', True))

for attr in [x for x in ['search_fallback', 'enable_backlog', 'enable_scheduled_backlog'] if hasattr(nzb_src, x)]:
for attr in [x for x in ['search_fallback', 'enable_backlog', 'enable_scheduled_backlog',
'scene_only', 'scene_loose', 'scene_loose_active',
'scene_rej_nuked', 'scene_nuked_active']
if hasattr(nzb_src, x)]:
setattr(nzb_src, attr, config.checkbox_to_value(kwargs.get(src_id_prefix + attr)))

for (attr, default) in [('scene_or_contain', ''), ('search_mode', 'eponly')]:
if hasattr(nzb_src, attr):
setattr(nzb_src, attr, str(kwargs.get(src_id_prefix + attr) or default).strip())

sickbeard.NEWZNAB_DATA = '!!!'.join([x.config_str() for x in sickbeard.newznabProviderList])
sickbeard.PROVIDER_ORDER = provider_list

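Newznab, torrent RSS and regular torrent sources all persist the same scene-filter settings from the provider config form: the five checkbox flags go through config.checkbox_to_value, and the free-text scene_or_contain value is stripped. A condensed sketch of that shared pattern (hypothetical helper name; form keys follow the '<source id>_<attribute>' convention used above, and config is the module-level import already used in this file):

def save_scene_filters(src, src_id, kwargs):
    for attr in ('scene_only', 'scene_loose', 'scene_loose_active',
                 'scene_rej_nuked', 'scene_nuked_active'):
        setattr(src, attr, config.checkbox_to_value(kwargs.get('%s_%s' % (src_id, attr))))
    key = '%s_scene_or_contain' % src_id
    if key in kwargs:
        src.scene_or_contain = str(kwargs.get(key) or '').strip()
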
File diff suppressed because it is too large