Merge branch 'release/0.17.0'

This commit is contained in:
JackDandy 2018-08-24 23:59:27 +01:00
commit 790e4e24e9
59 changed files with 5014 additions and 2666 deletions

View file

@ -1,4 +1,29 @@
### 0.16.23 (2018-08-21 21:00:00 UTC)
### 0.17.0 (2018-08-24 23:40:00 UTC)
* Change save config values only where reqd. reduces file by up to 75%
* Add 'Map an NZBGet "DestDir"' setting to config/Search/NZB Results tab (select NZBGet)
* Add TVDB, TheXem, and GitHub buttons to page History/Layout "Provider fails" that fetches a site Up/Down report
* Add bubble links to History/Provider fails when more than one provider has failures
* Add "Keep up to x most recent downloads" to Edit Show/Other
* Add "Keep up to x most recent downloads" to Manage/Bulk Change/Edit
* Change append number of downloads to keep to the number of file(s) at Display Show
* Add "Keep up to x most recent downloads" to add show finally step
* Add prune to refreshDir/rescan
* Update Tornado Web Server 5.0.1 (35a538f) to 5.0.1 (2b2a220a)
* Add HDME torrent provider
* Add HorribleSubs torrent provider
* Add ImmortalSeed torrent provider
* Add Xspeeds torrent provider
* Change consolidate provider filters into 'Only allow releases that are'
* Add provider filters, Only allow releases that are ...
'scene releases (srrDB/predb listed)', 'or contain' text or regex,
'non scene if no recent search results', 'non scene if no active search results',
'not scene nuked', and 'nuked if no active search results'
* Change improve tvdb_api performance; remember if episodes are cached and reload show if not and episodes are requested
* Change remove redundant torrent URLs and improve provider loader
### 0.16.23 (2018-08-21 21:00:00 UTC)
* Fix detection of existing files
* Change add sanitize 'imdbid' field in tvdb_api v2

View file

@ -2,7 +2,7 @@
GOTO :main
*******************************************************************************
onTxComplete.bat v1.0 for Sickgear
onTxComplete.bat v1.0 for SickGear
Script to copy select files to a location for SickGear to post process.

View file

@ -70,10 +70,12 @@ pre .prelight-num{
border-color:#222
}
.component-group.bubblelist a,
.ui-widget-content a{
color:#2d8fbf
}
.component-group.bubblelist a:hover,
.ui-widget-content a:hover{
color:#09a2ff
}
@ -990,7 +992,7 @@ fieldset[disabled] .navbar-default .btn-link:focus{
color:#ddd
}
.component-group.typelist .bgcol,
.component-group.bubblelist .bgcol,
.dropdown-menu{
background-color:#333;
border:1px solid rgba(0, 0, 0, 0.15);

View file

@ -65,10 +65,12 @@ pre .prelight-num{
border-color:#fff
}
.component-group.bubblelist a,
.ui-widget-content a{
color:rgb(42, 100, 150)
}
.component-group.bubblelist a:hover,
.ui-widget-content a:hover{
color:#09a2ff
}
@ -963,7 +965,7 @@ fieldset[disabled] .navbar-default .btn-link:focus{
background-color:#333
}
.component-group.typelist .bgcol,
.component-group.bubblelist .bgcol,
.dropdown-menu{
background-color:#f5f1e4;
border:1px solid rgba(0, 0, 0, 0.15);

View file

@ -236,6 +236,7 @@ inc_top.tmpl
border:1px solid
}
.component-group.bubblelist a,
.ui-widget-content a{
text-decoration:none
}
@ -3230,7 +3231,7 @@ td.tableright{
}
.optionWrapper{
width:450px;
width:475px;
margin-left:auto;
margin-right:auto;
padding:6px 12px
@ -3246,7 +3247,7 @@ td.tableright{
.optionWrapper div.selectChoices{
float:left;
width:175px;
width:200px;
margin-left:25px
}
@ -3282,6 +3283,9 @@ input.get_less_eps{
margin:0 6px 0 0;
min-width:70px
}
#provider-failures .check-site .btn{
min-width:115px
}
#media-search .btn.shows-more,
#media-search .btn.shows-less,
#provider-failures .btn.shows-more,
@ -3604,6 +3608,23 @@ img[src=""],img:not([src]){
top:-999px
}
.box-green{
background-color:#68b92b
}
.box-red{
background-color:#b72828
}
.box-green,
.box-red{
color:#eee;
padding:0 10px;
text-align:center;
text-decoration:none;
border-radius:4px 4px 4px 4px;
-moz-border-radius:3px;
-webkit-border-radius:3px
}
/* =======================================================================
bootstrap Overrides
========================================================================== */
@ -3782,6 +3803,18 @@ fieldset[disabled] .navbar-default .btn-link:focus{
color:#ccc
}
.component-group.bubble.last{padding:0;margin:0;border-bottom:none}
.component-group.bubblelist{min-height:30px}
.component-group.bubblelist .type{padding:6px}
.component-group.bubblelist .item{display:inline-block}
.component-group.bubblelist .item img{margin-right:4px}
.component-group.bubblelist .item.text{font-size:12px; padding-right:3px}
.component-group.bubblelist .item a{font-size:16px;padding-right:20px}
.component-group.bubblelist .item.text,
.component-group.bubblelist .item a{line-height:16px;vertical-align:middle}
.component-group.bubblelist .item a img{vertical-align:bottom;opacity:0.65;filter:alpha(opacity=65)}
.component-group.bubblelist .item a:hover img{opacity:1;filter:alpha(opacity=1)}
.dropdown-menu > li > a:hover, .dropdown-menu > li > a:focus{
color:#262626;
text-decoration:none;

BIN
gui/slick/images/iidrn.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 805 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 528 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 535 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 466 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 1,013 B

View file

@ -38,8 +38,10 @@
<tr class="infoTableSeperator"><td class="infoTableHeader"><i class="icon16-sg"></i> Homepage</td><td class="infoTableCell"><a href="<%= anon_url('https://github.com/SickGear/SickGear/wiki') %>" rel="noreferrer" onclick="window.open(this.href, '_blank'); return false;">https://github.com/SickGear/SickGear/wiki</a></td></tr>
<tr><td class="infoTableHeader"><i class="icon16-github"></i> Source</td><td class="infoTableCell"><a href="<%= anon_url('https://github.com/SickGear/SickGear/') %>" rel="noreferrer" onclick="window.open(this.href, '_blank'); return false;">https://github.com/SickGear/SickGear/</a></td></tr>
<tr><td class="infoTableHeader"><i class="icon16-mirc"></i> Internet Relay Chat</td><td class="infoTableCell"><a href="irc://irc.freenode.net/#SickGear" rel="noreferrer"><i>#SickGear</i> on <i>irc.freenode.net</i></a></td></tr>
<tr class="infoTableSeperator"><td class="infoTableHeader">Powered by</td><td class="infoTableCell">Python, HTML5, jQuery, SQLite, TheTVDB, Trakt.tv, Fanart.tv, TMDb, GitHub</td></tr>
<tr><td class="infoTableHeader">&nbsp;</td><td class="infoTableHeader">This project uses the TMDb API but is not endorsed or certified by TMDb.</td></tr>
<tr class="infoTableSeperator"><td class="infoTableHeader">Powered by</td><td class="infoTableCell">Python, HTML5, jQuery, SQLite, Regex, CSS, Javascript, Tornado webserver</td></tr>
<tr><td class="infoTableHeader">&nbsp;</td><td class="infoTableHeader">Huge thanks to Jetbrains for PyCharm IDE, trust them with your development project</td></tr>
<tr><td class="infoTableHeader">Credits to</td><td class="infoTableHeader">Also; TheTVDB, Trakt.tv, TVMaze, Fanart.tv, IMDb, TheXem, srrDB, Predb, and GitHub</td></tr>
<tr><td class="infoTableHeader">&nbsp;</td><td class="infoTableHeader">This project uses the TMDb API but is not endorsed or certified by TMDb</td></tr>
</table>
</div>

View file

@ -23,17 +23,6 @@
<h1 class="title">$title</h1>
#end if
<style>
.component-group.typelist{min-height:30px}
.component-group.typelist .type{padding:6px}
.component-group.typelist .item{display:inline-block}
.component-group.typelist .item img{margin-right:4px}
.component-group.typelist .item.text{font-size:12px; padding-right:3px}
.component-group.typelist .item a{font-size:16px;padding-right:20px}
.component-group.typelist .item.text,
.component-group.typelist .item a{line-height:16px;vertical-align:middle}
</style>
<img src="$sbRoot/images/loading16#echo ('', '-dark')['dark' == $sickbeard.THEME_NAME]#.gif" height="16" width="16" style="display:none">
<div id="config">
<div id="config-content">
@ -52,17 +41,17 @@
<div id="tabs-1">
<div class="component-group typelist">
<div class="component-group bubblelist">
<div class="type bgcol">
<span class="list"><div class="item text">Bubble links:</div>
<div class="item"><img height="16px" src="$sbRoot/images/notifiers/emby.png"><a href="#emby" rel="noreferrer">Emby</a></div>
<div class="item"><img height="16px" src="$sbRoot/images/notifiers/kodi.png"><a href="#kodi" rel="noreferrer">Kodi</a></div>
<div class="item"><img height="16px" src="$sbRoot/images/notifiers/plex.png"><a href="#plex" rel="noreferrer">Plex</a></div>
<div class="item"><img height="16px" src="$sbRoot/images/notifiers/nmj.png"><a href="#nmj" rel="noreferrer">NMJ</a></div>
<div class="item"><img height="16px" src="$sbRoot/images/notifiers/nmj.png"><a href="#nmjv2" rel="noreferrer">NMJv2</a></div>
<div class="item"><img height="16px" src="$sbRoot/images/notifiers/synoindex.png"><a href="#synoindexer" rel="noreferrer">Syno Indexer</a></div>
<div class="item"><img height="16px" src="$sbRoot/images/notifiers/synologynotifier.png"><a href="#synonotifier" rel="noreferrer">Syno Notifier</a></div>
<div class="item"><img height="16px" src="$sbRoot/images/notifiers/pytivo.png"><a href="#pytivo" rel="noreferrer">pyTivo</a></div>
<div class="item"><a href="#emby" rel="noreferrer"><img height="16px" src="$sbRoot/images/notifiers/emby.png">Emby</a></div>
<div class="item"><a href="#kodi" rel="noreferrer"><img height="16px" src="$sbRoot/images/notifiers/kodi.png">Kodi</a></div>
<div class="item"><a href="#plex" rel="noreferrer"><img height="16px" src="$sbRoot/images/notifiers/plex.png">Plex</a></div>
<div class="item"><a href="#nmj" rel="noreferrer"><img height="16px" src="$sbRoot/images/notifiers/nmj.png">NMJ</a></div>
<div class="item"><a href="#nmjv2" rel="noreferrer"><img height="16px" src="$sbRoot/images/notifiers/nmj.png">NMJv2</a></div>
<div class="item"><a href="#synoindexer" rel="noreferrer"><img height="16px" src="$sbRoot/images/notifiers/synoindex.png">Syno Indexer</a></div>
<div class="item"><a href="#synonotifier" rel="noreferrer"><img height="16px" src="$sbRoot/images/notifiers/synologynotifier.png">Syno Notifier</a></div>
<div class="item"><a href="#pytivo" rel="noreferrer"><img height="16px" src="$sbRoot/images/notifiers/pytivo.png">pyTivo</a></div>
</span>
</div>
</div>
@ -841,18 +830,18 @@
<div id="tabs-2">
<div class="component-group typelist">
<div class="component-group bubblelist">
<div class="type bgcol">
<span class="list"><div class="item text">Bubble links:</div>
<div class="item"><img height="16px" src="$sbRoot/images/notifiers/boxcar2.png"><a href="#boxcar2" rel="noreferrer">Boxcar2</a></div>
<div class="item"><a href="#boxcar2" rel="noreferrer"><img height="16px" src="$sbRoot/images/notifiers/boxcar2.png">Boxcar2</a></div>
#if 'PUSHALOT' in NotifierFactory().notifiers
<div class="item"><img height="16px" src="$sbRoot/images/notifiers/pushalot.png"><a href="#pushalot" rel="noreferrer">Pushalot</a></div>
<div class="item"><a href="#pushalot" rel="noreferrer"><img height="16px" src="$sbRoot/images/notifiers/pushalot.png">Pushalot</a></div>
#end if
<div class="item"><img height="16px" src="$sbRoot/images/notifiers/pushbullet.png"><a href="#pushbullet" rel="noreferrer">Pushbullet</a></div>
<div class="item"><img height="16px" src="$sbRoot/images/notifiers/pushover.png"><a href="#pushover" rel="noreferrer">Pushover</a></div>
<div class="item"><img height="16px" src="$sbRoot/images/notifiers/growl.png"><a href="#growl" rel="noreferrer">Growl</a></div>
<div class="item"><img height="16px" src="$sbRoot/images/notifiers/prowl.png"><a href="#prowl" rel="noreferrer">Prowl</a></div>
<div class="item"><img height="16px" src="$sbRoot/images/notifiers/libnotify.png"><a href="#libnotify" rel="noreferrer">Libnotify</a></div>
<div class="item"><a href="#pushbullet" rel="noreferrer"><img height="16px" src="$sbRoot/images/notifiers/pushbullet.png">Pushbullet</a></div>
<div class="item"><a href="#pushover" rel="noreferrer"><img height="16px" src="$sbRoot/images/notifiers/pushover.png">Pushover</a></div>
<div class="item"><a href="#growl" rel="noreferrer"><img height="16px" src="$sbRoot/images/notifiers/growl.png">Growl</a></div>
<div class="item"><a href="#prowl" rel="noreferrer"><img height="16px" src="$sbRoot/images/notifiers/prowl.png">Prowl</a></div>
<div class="item"><a href="#libnotify" rel="noreferrer"><img height="16px" src="$sbRoot/images/notifiers/libnotify.png">Libnotify</a></div>
</span>
</div>
</div>
@ -1437,15 +1426,15 @@
<div id="tabs-3">
<div class="component-group typelist">
<div class="component-group bubblelist">
<div class="type bgcol">
<span class="list"><div class="item text">Bubble links:</div>
<div class="item"><img height="16px" src="$sbRoot/images/notifiers/trakt.png"><a href="#trakt" rel="noreferrer">Trakt</a></div>
<div class="item"><img height="16px" src="$sbRoot/images/notifiers/slack.png"><a href="#slack" rel="noreferrer">Slack</a></div>
<div class="item"><img height="16px" src="$sbRoot/images/notifiers/discordapp.png"><a href="#discordapp" rel="noreferrer">Discordapp</a></div>
<div class="item"><img height="16px" src="$sbRoot/images/notifiers/gitter.png"><a href="#gitter" rel="noreferrer">Gitter</a></div>
<div class="item"><img height="16px" src="$sbRoot/images/notifiers/twitter.png"><a href="#twitter" rel="noreferrer">Twitter</a></div>
<div class="item"><img height="16px" src="$sbRoot/images/notifiers/email.png"><a href="#email" rel="noreferrer">Email</a></div>
<div class="item"><a href="#trakt" rel="noreferrer"><img height="16px" src="$sbRoot/images/notifiers/trakt.png">Trakt</a></div>
<div class="item"><a href="#slack" rel="noreferrer"><img height="16px" src="$sbRoot/images/notifiers/slack.png">Slack</a></div>
<div class="item"><a href="#discordapp" rel="noreferrer"><img height="16px" src="$sbRoot/images/notifiers/discordapp.png">Discordapp</a></div>
<div class="item"><a href="#gitter" rel="noreferrer"><img height="16px" src="$sbRoot/images/notifiers/gitter.png">Gitter</a></div>
<div class="item"><a href="#twitter" rel="noreferrer"><img height="16px" src="$sbRoot/images/notifiers/twitter.png">Twitter</a></div>
<div class="item"><a href="#email" rel="noreferrer"><img height="16px" src="$sbRoot/images/notifiers/email.png">Email</a></div>
</span>
</div>
</div>

View file

@ -2,6 +2,7 @@
#from sickbeard.clients import get_client_instance
#from sickbeard.providers.generic import GenericProvider
#from sickbeard.providers import thepiratebay
#from sickbeard.providers.newznab import NewznabConstants
#from sickbeard.helpers import anon_url, starify
##
#set global $title = 'Config - Providers'
@ -28,32 +29,20 @@
#if not $sickbeard.USE_TORRENTS
$methods_notused.append('Torrent')
#end if
#slurp
#if $sickbeard.USE_NZBS or $sickbeard.USE_TORRENTS
<script type="text/javascript" charset="utf-8">
<!--
\$(document).ready(function(){
#if $sickbeard.USE_NZBS
var show_nzb_providers = <%= 'true' if sickbeard.USE_NZBS else 'false' %>;
#for $cur_newznab_provider in $sickbeard.newznabProviderList:
\$(this).addProvider('$cur_newznab_provider.get_id()', '$cur_newznab_provider.name', '$cur_newznab_provider.url', '<%= starify(cur_newznab_provider.key) %>', '$cur_newznab_provider.cat_ids', $int($cur_newznab_provider.default), show_nzb_providers);
\$(this).addProvider('$cur_newznab_provider.get_id()', '$cur_newznab_provider.name', '$cur_newznab_provider.url', '<%= starify(cur_newznab_provider.key) %>', '$cur_newznab_provider.cat_ids', $int($cur_newznab_provider.default), !0);
#end for
#end if
#if $sickbeard.USE_TORRENTS
#for $cur_torrent_rss_provider in $sickbeard.torrentRssProviderList:
\$(this).addTorrentRssProvider('$cur_torrent_rss_provider.get_id()', '$cur_torrent_rss_provider.name', '$cur_torrent_rss_provider.url', '<%= starify(cur_torrent_rss_provider.cookies) %>');
\$(this).addTorrentRssProvider('$cur_torrent_rss_provider.get_id()', '$cur_torrent_rss_provider.name', '$cur_torrent_rss_provider.url', '<%= starify(cur_torrent_rss_provider.cookies) %>');
#end for
#end if
});
//-->
@ -63,6 +52,7 @@
##
#set $html_selected = ' selected="selected"'
#set $html_checked = 'checked="checked" '
#set $backlog_only_tip = False
<div id="config" class="search_providers">
<div id="config-content">
@ -91,7 +81,6 @@
<p>Allows searching recent and past releases.</p>
<p>Check off and drag providers into the order you want them to be used.</p>
<p>At least one provider is required, two are recommended.</p>
#if $methods_notused
<blockquote style="margin:20px 0"><%= '/'.join(x for x in methods_notused) %> providers can be enabled in <a href="$sbRoot/config/search/">Search Settings</a></blockquote>
#else
@ -107,6 +96,7 @@
#set $cur_name = $cur_provider.get_id()
#set $cur_url = $cur_provider.url
#set $show_type = $sickbeard.USE_NZBS and $sickbeard.USE_TORRENTS and $GenericProvider.NZB == $cur_provider.providerType
#set $spotweb = ('', 'sw ')[$getattr($cur_provider, 'server_type', None) == $NewznabConstants.SERVER_SPOTWEB and type($cur_provider).__name__ not in ['TorrentRssProvider']]
#set $bad_url = not $cur_url and cur_provider.is_enabled()
#set $tip = ($cur_provider.name + ('', ' (enable for link)')[not $cur_url and not cur_provider.is_enabled()],
'Site Down')[$bad_url]
@ -115,13 +105,11 @@
<input type="checkbox" id="enable_$cur_name" class="provider_enabler" <%= html_checked if cur_provider.is_enabled() else '' %>/>
<a class="imgLink" #if $cur_url#href="<%= anon_url(cur_url) %>" onclick="window.open(this.href,'_blank');return false;"#else#name=""#end if# rel="noreferrer"><img src="$sbRoot/images/providers/$cur_provider.image_name()" alt="$tip" title="$tip" width="16" height="16" style="vertical-align:middle" /></a>
<span style="vertical-align:middle">$cur_provider.name$state</span>
#if $cur_provider.is_public_access() and type($cur_provider).__name__ not in ['TorrentRssProvider']
#if $cur_provider.is_public_access() and type($cur_provider).__name__ not in ['TorrentRssProvider']#
<span style="font-size:10px;vertical-align:top;font-weight:normal">(PA)</span>
#end if#
#if $show_type
<span style="font-size:10px;vertical-align:top;font-weight:normal">($cur_provider.providerType)</span>
#end if#
#if not $cur_provider.supports_backlog#*#end if#
#end if##if $show_type##slurp
<span style="font-size:10px;vertical-align:top;font-weight:normal">($spotweb$cur_provider.providerType)</span>
#end if##if not $cur_provider.supports_backlog#*#set $backlog_only_tip=True##end if##slurp
<span class="ui-icon ui-icon-arrowthick-2-n-s pull-right" style="margin-top:3px"></span>
</li>
#end for
@ -131,6 +119,9 @@
#if $sickbeard.USE_NZBS or $sickbeard.USE_TORRENTS
<div id="provider_key">
<span style="float:left;font-size:10px;vertical-align:top;font-weight:normal">(PA)</span><p class="note">Public access, no account required</p>
#if $backlog_only_tip
<h4 class="note">*</h4><p class="note">No backlog, latest releases only</p>
#end if
## #if $sickbeard.USE_TORRENTS
## <h4 class="note">**</h4><p class="note">Supports <b>limited</b> backlog searches, some episodes/qualities may not be available</p>
## #end if
@ -167,12 +158,11 @@
if $x.providerType == $GenericProvider.NZB and $sickbeard.USE_NZBS or
$x.providerType == $GenericProvider.TORRENT and $sickbeard.USE_TORRENTS]
#if $cur_provider.is_enabled()
$provider_config_list_enabled.append($cur_provider)
#set void = $provider_config_list_enabled.append($cur_provider)
#else
$provider_config_list.append($cur_provider)
#set void = $provider_config_list.append($cur_provider)
#end if
#end for
#if $provider_config_list + $provider_config_list_enabled
<select id="editAProvider" class="form-control input-sm">
#if $provider_config_list_enabled
@ -201,6 +191,14 @@
#set $recentsearch_tip = 'match recent episodes from results of latest releases'
#set $backlogsearch_tip = 'allow active searching for individual episode releases'
#set $scheduled_backlog_tip = 'enable scheduled searching for backlogged episodes'
#set $filter_title = 'Only allow releases that are'
#set $filter_scene_only_desc = 'scene releases (srrDB/predb listed)'
#set $filter_scene_or_contain_desc = '...or contain'
#set $filter_scene_loose_desc = 'non scene if no recent search results'
#set $filter_scene_loose_active_desc = 'non scene if no active search results'
#set $filter_scene_rej_nuked_desc = 'not scene nuked'
#set $filter_scene_nuked_active_desc = 'nuked if no active search results'
#set $filter_tip = 'nothing selected allows everything (i.e. no filtering, default)'
#for $cur_newznab_provider in [$cur_provider for $cur_provider in $sickbeard.newznabProviderList]
<div class="providerDiv" id="${cur_newznab_provider.get_id()}Div">
#set $can_recent = $hasattr($cur_newznab_provider, 'enable_recentsearch')
@ -212,13 +210,13 @@
<span class="component-desc">
#if $can_recent
<label for="${cur_newznab_provider.get_id()}_enable_recentsearch" style="display:inline">
<input type="checkbox" name="${cur_newznab_provider.get_id()}_enable_recentsearch" id="${cur_newznab_provider.get_id()}_enable_recentsearch" <%= html_checked if cur_newznab_provider.enable_recentsearch else '' %>/>
<input class="view-if" type="checkbox" name="${cur_newznab_provider.get_id()}_enable_recentsearch" id="${cur_newznab_provider.get_id()}_enable_recentsearch" <%= html_checked if cur_newznab_provider.enable_recentsearch else '' %>/>
<p>$recentsearch_tip</p>
</label>
#end if
#if $can_backlog
<label for="${cur_newznab_provider.get_id()}_enable_backlog" style="display:inline">
<input class="enabler" type="checkbox" name="${cur_newznab_provider.get_id()}_enable_backlog" id="${cur_newznab_provider.get_id()}_enable_backlog" <%= html_checked if cur_newznab_provider.enable_backlog else '' %>/>
<input class="enabler view-if" type="checkbox" name="${cur_newznab_provider.get_id()}_enable_backlog" id="${cur_newznab_provider.get_id()}_enable_backlog" <%= html_checked if cur_newznab_provider.enable_backlog else '' %>/>
<p>$backlogsearch_tip</p>
</label>
#end if
@ -253,23 +251,56 @@
</label>
</div>
#end if
#if $hasattr($cur_newznab_provider, 'may_filter'):
<div class="field-pair">
<span class="component-title">Allow releases that are</span>
<span class="component-title">$filter_title</span>
<span class="component-desc">
<div style="margin-bottom:10px">
<div style="float:left;max-width:230px">
<label for="${cur_newznab_provider.get_id()}_scene_only">
<input type="checkbox" name="${cur_newznab_provider.get_id()}_scene_only" id="${cur_newznab_provider.get_id()}_scene_only" <%= html_checked if cur_newznab_provider.scene_only else '' %>>
<span>$filter_scene_only_desc</span>
</label>
<label for="${cur_newznab_provider.get_id()}_scene_or_contain">
$filter_scene_or_contain_desc<input style="float:right;margin-left:4px;padding:2px 4px;height:24px;width:144px" type="text" name="${cur_newznab_provider.get_id()}_scene_or_contain" placeholder="(opt: start 'regex:')" value="<%= cur_newznab_provider.scene_or_contain %>" class="form-control input-sm input150">
</label>
</div>
<div style="margin-left:230px">
<label class="show-if-${cur_newznab_provider.get_id()}_enable_recentsearch" for="${cur_newznab_provider.get_id()}_scene_loose">
<input type="checkbox" name="${cur_newznab_provider.get_id()}_scene_loose" id="${cur_newznab_provider.get_id()}_scene_loose" <%= html_checked if cur_newznab_provider.scene_loose else '' %>>
<span>$filter_scene_loose_desc</span>
</label>
<label class="show-if-${cur_newznab_provider.get_id()}_enable_backlog" for="${cur_newznab_provider.get_id()}_scene_loose_active">
<input type="checkbox" name="${cur_newznab_provider.get_id()}_scene_loose_active" id="${cur_newznab_provider.get_id()}_scene_loose_active" <%= html_checked if cur_newznab_provider.scene_loose_active else '' %>>
<span>$filter_scene_loose_active_desc</span>
</label>
</div>
<div style="clear:both">
<label style="float:left;min-width:230px" for="${cur_newznab_provider.get_id()}_scene_rej_nuked">
<input type="checkbox" name="${cur_newznab_provider.get_id()}_scene_rej_nuked" id="${cur_newznab_provider.get_id()}_scene_rej_nuked" <%= html_checked if cur_newznab_provider.scene_rej_nuked else '' %>>
<span>$filter_scene_rej_nuked_desc</span>
</label>
<label class="show-if-${cur_newznab_provider.get_id()}_enable_backlog" for="${cur_newznab_provider.get_id()}_scene_nuked_active">
<input type="checkbox" name="${cur_newznab_provider.get_id()}_scene_nuked_active" id="${cur_newznab_provider.get_id()}_scene_nuked_active" <%= html_checked if cur_newznab_provider.scene_nuked_active else '' %>>
<span>$filter_scene_nuked_active_desc</span>
</label>
</div>
</div>
#if $hasattr($cur_newznab_provider, 'may_filter'):
<div>
#for $cur_fval, $filter in $cur_newznab_provider.may_filter.iteritems()
#set $cur_fname, $cur_is_default = $filter[0], $filter[1]
#set $filter_id = '%s_filter_%s' % ($cur_newznab_provider.get_id(), $cur_fval)
<label class="space-right">
<input type="checkbox" name="$filter_id" id="$filter_id" #echo ('', $html_checked)[$cur_fval in $cur_newznab_provider.filter]#/>
<span>$cur_fname</span>
</label>
<label class="space-right">
<input type="checkbox" name="$filter_id" id="$filter_id" #echo ('', $html_checked)[$cur_fval in $cur_newznab_provider.filter]#/>
<span>$cur_fname</span>
</label>
#end for
<span>(see site for meaning)</span>
<p>nothing selected allows everything (no filter, default)</p>
<span>(see $cur_newznab_provider.name)</span>
</div>
#end if
<p style="clear:both">$filter_tip</p>
</span>
</div>
#end if
#if $hasattr($cur_newznab_provider, 'search_mode') and $cur_newznab_provider.supports_backlog:
<div class="field-pair">
<span class="component-title">Episode search mode</span>
@ -312,13 +343,13 @@
<span class="component-desc">
#if $can_recent
<label for="${cur_nzb_provider.get_id()}_enable_recentsearch" style="display:inline">
<input type="checkbox" name="${cur_nzb_provider.get_id()}_enable_recentsearch" id="${cur_nzb_provider.get_id()}_enable_recentsearch" <%= html_checked if cur_nzb_provider.enable_recentsearch else '' %>/>
<input class="view-if" type="checkbox" name="${cur_nzb_provider.get_id()}_enable_recentsearch" id="${cur_nzb_provider.get_id()}_enable_recentsearch" <%= html_checked if cur_nzb_provider.enable_recentsearch else '' %>/>
<p>$recentsearch_tip</p>
</label>
#end if
#if $can_backlog
<label for="${cur_nzb_provider.get_id()}_enable_backlog" style="display:inline">
<input class="enabler" type="checkbox" name="${cur_nzb_provider.get_id()}_enable_backlog" id="${cur_nzb_provider.get_id()}_enable_backlog" <%= html_checked if cur_nzb_provider.enable_backlog else '' %>/>
<input class="enabler view-if" type="checkbox" name="${cur_nzb_provider.get_id()}_enable_backlog" id="${cur_nzb_provider.get_id()}_enable_backlog" <%= html_checked if cur_nzb_provider.enable_backlog else '' %>/>
<p>$backlogsearch_tip</p>
</label>
#end if
@ -357,6 +388,43 @@
</label>
</div>
#end if
<div class="field-pair">
<span class="component-title">$filter_title</span>
<span class="component-desc">
<div style="margin-bottom:10px">
<div style="float:left;max-width:230px">
<label for="${cur_nzb_provider.get_id()}_scene_only">
<input type="checkbox" name="${cur_nzb_provider.get_id()}_scene_only" id="${cur_nzb_provider.get_id()}_scene_only" <%= html_checked if cur_nzb_provider.scene_only else '' %>>
<span>$filter_scene_only_desc</span>
</label>
<label for="${cur_nzb_provider.get_id()}_scene_or_contain">
$filter_scene_or_contain_desc<input style="float:right;margin-left:4px;padding:2px 4px;height:24px;width:144px" type="text" name="${cur_nzb_provider.get_id()}_scene_or_contain" placeholder="(opt: start 'regex:')" value="<%= cur_nzb_provider.scene_or_contain %>" class="form-control input-sm input150">
</label>
</div>
<div style="margin-left:230px">
<label class="show-if-${cur_nzb_provider.get_id()}_enable_recentsearch" for="${cur_nzb_provider.get_id()}_scene_loose">
<input type="checkbox" name="${cur_nzb_provider.get_id()}_scene_loose" id="${cur_nzb_provider.get_id()}_scene_loose" <%= html_checked if cur_nzb_provider.scene_loose else '' %>>
<span>$filter_scene_loose_desc</span>
</label>
<label class="show-if-${cur_nzb_provider.get_id()}_enable_backlog" for="${cur_nzb_provider.get_id()}_scene_loose_active">
<input type="checkbox" name="${cur_nzb_provider.get_id()}_scene_loose_active" id="${cur_nzb_provider.get_id()}_scene_loose_active" <%= html_checked if cur_nzb_provider.scene_loose_active else '' %>>
<span>$filter_scene_loose_active_desc</span>
</label>
</div>
<div style="clear:both">
<label style="float:left;min-width:230px" for="${cur_nzb_provider.get_id()}_scene_rej_nuked">
<input type="checkbox" name="${cur_nzb_provider.get_id()}_scene_rej_nuked" id="${cur_nzb_provider.get_id()}_scene_rej_nuked" <%= html_checked if cur_nzb_provider.scene_rej_nuked else '' %>>
<span>$filter_scene_rej_nuked_desc</span>
</label>
<label class="show-if-${cur_nzb_provider.get_id()}_enable_backlog" for="${cur_nzb_provider.get_id()}_scene_nuked_active">
<input type="checkbox" name="${cur_nzb_provider.get_id()}_scene_nuked_active" id="${cur_nzb_provider.get_id()}_scene_nuked_active" <%= html_checked if cur_nzb_provider.scene_nuked_active else '' %>>
<span>$filter_scene_nuked_active_desc</span>
</label>
</div>
</div>
<p style="clear:both">$filter_tip</p>
</span>
</div>
#if $hasattr($cur_nzb_provider, 'search_mode') and $cur_nzb_provider.supports_backlog:
<div class="field-pair">
<span class="component-title">Episode search mode</span>
@ -415,13 +483,13 @@
<span class="component-desc">
#if $can_recent
<label for="${cur_torrent_provider.get_id()}_enable_recentsearch" style="display:inline">
<input type="checkbox" name="${cur_torrent_provider.get_id()}_enable_recentsearch" id="${cur_torrent_provider.get_id()}_enable_recentsearch" <%= html_checked if cur_torrent_provider.enable_recentsearch else '' %>/>
<input class="view-if" type="checkbox" name="${cur_torrent_provider.get_id()}_enable_recentsearch" id="${cur_torrent_provider.get_id()}_enable_recentsearch" <%= html_checked if cur_torrent_provider.enable_recentsearch else '' %>/>
<p>$recentsearch_tip</p>
</label>
#end if
#if $can_backlog
<label for="${cur_torrent_provider.get_id()}_enable_backlog" style="display:inline">
<input class="enabler" type="checkbox" name="${cur_torrent_provider.get_id()}_enable_backlog" id="${cur_torrent_provider.get_id()}_enable_backlog" <%= html_checked if cur_torrent_provider.enable_backlog else '' %>/>
<input class="enabler view-if" type="checkbox" name="${cur_torrent_provider.get_id()}_enable_backlog" id="${cur_torrent_provider.get_id()}_enable_backlog" <%= html_checked if cur_torrent_provider.enable_backlog else '' %>/>
<p>$backlogsearch_tip</p>
</label>
#end if
@ -452,11 +520,17 @@
</div>
#end if
#if $hasattr($cur_torrent_provider, 'api_key'):
#set $field_name = cur_torrent_provider.get_id() + '_api_key'
<div class="field-pair">
<label for="${cur_torrent_provider.get_id()}_api_key">
<span class="component-title">Api key:</span>
<label for="$field_name">
#set $url_label = callable(getattr(cur_torrent_provider, 'ui_string', None)) and cur_torrent_provider.ui_string($field_name) or 'Api key'
<span class="component-title">$url_label</span>
<span class="component-desc">
<input type="text" name="${cur_torrent_provider.get_id()}_api_key" id="${cur_torrent_provider.get_id()}_api_key" value="<%= starify(cur_torrent_provider.api_key) %>" class="form-control input-sm input350" />
<input type="text" name="$field_name" id="$field_name" value="<%= starify(cur_torrent_provider.api_key) %>" class="form-control input-sm input350" />
#if callable(getattr(cur_torrent_provider, 'ui_string', None))
#set $tip_text = cur_torrent_provider.ui_string($field_name + '_tip')
<div class="clear-left"><p>$tip_text</p></div>
#end if
</span>
</label>
</div>
@ -571,57 +645,86 @@ name = '' if not client else get_client_instance(sickbeard.TORRENT_METHOD)().nam
</label>
</div>
#end if
#if $hasattr($cur_torrent_provider, 'confirmed'):
<div class="field-pair">
<label for="${cur_torrent_provider.get_id()}_confirmed">
<span class="component-title">Confirmed download</span>
<span class="component-desc">
<input type="checkbox" name="${cur_torrent_provider.get_id()}_confirmed" id="${cur_torrent_provider.get_id()}_confirmed" <%= html_checked if cur_torrent_provider.confirmed else '' %>/>
#set $confirm_label = callable(getattr(cur_torrent_provider, 'ui_string', None)) and cur_torrent_provider.ui_string(cur_torrent_provider.get_id() + '_confirm') or 'only download torrents from trusted or verified uploaders ?'
<p>$confirm_label</p>
</span>
</label>
</div>
<span class="component-title">$filter_title</span>
<span class="component-desc">
<div style="margin-bottom:10px">
<div style="float:left;max-width:230px">
<label for="${cur_torrent_provider.get_id()}_scene_only">
<input type="checkbox" name="${cur_torrent_provider.get_id()}_scene_only" id="${cur_torrent_provider.get_id()}_scene_only" <%= html_checked if cur_torrent_provider.scene_only else '' %>>
<span>$filter_scene_only_desc</span>
</label>
<label for="${cur_torrent_provider.get_id()}_scene_or_contain">
$filter_scene_or_contain_desc<input style="float:right;margin-left:4px;padding:2px 4px;height:24px;width:144px" type="text" name="${cur_torrent_provider.get_id()}_scene_or_contain" placeholder="(opt: start 'regex:')" value="<%= cur_torrent_provider.scene_or_contain %>" class="form-control input-sm input150">
</label>
</div>
<div style="margin-left:230px">
<label class="show-if-${cur_torrent_provider.get_id()}_enable_recentsearch" for="${cur_torrent_provider.get_id()}_scene_loose">
<input type="checkbox" name="${cur_torrent_provider.get_id()}_scene_loose" id="${cur_torrent_provider.get_id()}_scene_loose" <%= html_checked if cur_torrent_provider.scene_loose else '' %>>
<span>$filter_scene_loose_desc</span>
</label>
#if $cur_torrent_provider.supports_backlog:
<label class="show-if-${cur_torrent_provider.get_id()}_enable_backlog" for="${cur_torrent_provider.get_id()}_scene_loose_active">
<input type="checkbox" name="${cur_torrent_provider.get_id()}_scene_loose_active" id="${cur_torrent_provider.get_id()}_scene_loose_active" <%= html_checked if cur_torrent_provider.scene_loose_active else '' %>>
<span>$filter_scene_loose_active_desc</span>
</label>
#end if
</div>
<div style="clear:both">
<label style="float:left;min-width:230px" for="${cur_torrent_provider.get_id()}_scene_rej_nuked">
<input type="checkbox" name="${cur_torrent_provider.get_id()}_scene_rej_nuked" id="${cur_torrent_provider.get_id()}_scene_rej_nuked" <%= html_checked if cur_torrent_provider.scene_rej_nuked else '' %>>
<span>$filter_scene_rej_nuked_desc</span>
</label>
#if $cur_torrent_provider.supports_backlog:
<label class="show-if-${cur_torrent_provider.get_id()}_enable_backlog" for="${cur_torrent_provider.get_id()}_scene_nuked_active">
<input type="checkbox" name="${cur_torrent_provider.get_id()}_scene_nuked_active" id="${cur_torrent_provider.get_id()}_scene_nuked_active" <%= html_checked if cur_torrent_provider.scene_nuked_active else '' %>>
<span>$filter_scene_nuked_active_desc</span>
</label>
<span class="hide-if-${cur_torrent_provider.get_id()}_enable_backlog">&nbsp;</span>
#end if
</div>
</div>
#if $hasattr($cur_torrent_provider, 'freeleech'):
<div class="field-pair">
<label for="${cur_torrent_provider.get_id()}_freeleech">
<span class="component-title">Freeleech</span>
<span class="component-desc">
<input type="checkbox" name="${cur_torrent_provider.get_id()}_freeleech" id="${cur_torrent_provider.get_id()}_freeleech" <%= html_checked if cur_torrent_provider.freeleech else '' %>/>
<p>only download <b>[FreeLeech]</b> torrents</p>
</span>
</label>
</div>
<div>
<label for="${cur_torrent_provider.get_id()}_freeleech" class="space-right">
<input type="checkbox" name="${cur_torrent_provider.get_id()}_freeleech" id="${cur_torrent_provider.get_id()}_freeleech" <%= html_checked if cur_torrent_provider.freeleech else '' %>/>
<span><b>[FreeLeech]</b> only</span>
</label>
</div>
#end if
#if $hasattr($cur_torrent_provider, 'confirmed'):
<div>
<label for="${cur_torrent_provider.get_id()}_confirmed">
<input type="checkbox" name="${cur_torrent_provider.get_id()}_confirmed" id="${cur_torrent_provider.get_id()}_confirmed" <%= html_checked if cur_torrent_provider.confirmed else '' %>/>
#set $confirm_label = callable(getattr(cur_torrent_provider, 'ui_string', None)) and cur_torrent_provider.ui_string(cur_torrent_provider.get_id() + '_confirm') or 'site trusted or from verified uploaders'
<span>$confirm_label</span>
</label>
</div>
#end if
#if $hasattr($cur_torrent_provider, 'may_filter'):
<div class="field-pair">
<span class="component-title">Allow releases that are</span>
<span class="component-desc">
<div>
#for $cur_fval, $filter in $cur_torrent_provider.may_filter.iteritems()
#set $cur_fname, $cur_is_default = $filter[0], $filter[1]
#set $filter_id = '%s_filter_%s' % ($cur_torrent_provider.get_id(), $cur_fval)
<label class="space-right">
<input type="checkbox" name="$filter_id" id="$filter_id" #echo ('', $html_checked)[$cur_fval in $cur_torrent_provider.filter]#/>
<span>$cur_fname</span>
</label>
<label class="space-right">
<input type="checkbox" name="$filter_id" id="$filter_id" #echo ('', $html_checked)[$cur_fval in $cur_torrent_provider.filter]#/>
<span>$cur_fname</span>
</label>
#end for
<span>(see site for meaning)</span>
<p>nothing selected allows everything (no filter, default)</p>
</span>
</div>
<span>(see $cur_torrent_provider.name)</span>
</div>
#end if
#if $hasattr($cur_torrent_provider, 'reject_m2ts'):
<div class="field-pair">
<label for="${cur_torrent_provider.get_id()}_reject_m2ts">
<span class="component-title">Reject Blu-ray M2TS releases</span>
<span class="component-desc">
<input type="checkbox" name="${cur_torrent_provider.get_id()}_reject_m2ts" id="${cur_torrent_provider.get_id()}_reject_m2ts" <%= html_checked if cur_torrent_provider.reject_m2ts else '' %>/>
<p>enable to ignore Blu-ray MPEG-2 Transport Stream container releases</p>
</span>
</label>
</div>
<div>
<label for="${cur_torrent_provider.get_id()}_reject_m2ts">
<input type="checkbox" name="${cur_torrent_provider.get_id()}_reject_m2ts" id="${cur_torrent_provider.get_id()}_reject_m2ts" <%= html_checked if cur_torrent_provider.reject_m2ts else '' %>/>
<span>not Blu-ray M2TS (MPEG-2 Transport Stream) container releases</span>
</label>
</div>
#end if
<p style="clear:both">$filter_tip</p>
</span>
</div>
#if $hasattr($cur_torrent_provider, 'search_mode') and $cur_torrent_provider.supports_backlog:
<div class="field-pair">
<span class="component-title">Episode search mode</span>

View file

@ -240,7 +240,7 @@
<span class="component-title">Send .nzb files to:</span>
<span class="component-desc">
<select name="nzb_method" id="nzb_method" class="form-control input-sm">
#set $nzb_method_text = {'blackhole': "Black hole", 'sabnzbd': "SABnzbd", 'nzbget': "NZBget"}
#set $nzb_method_text = {'blackhole': "Black hole", 'sabnzbd': "SABnzbd", 'nzbget': "NZBGet"}
#for $curAction in ('sabnzbd', 'blackhole', 'nzbget'):
#set $selected = $html_selected if $sickbeard.NZB_METHOD == $curAction else ''
<option value="$curAction"$selected>$nzb_method_text[$curAction]</option>
@ -328,18 +328,18 @@
<div class="field-pair">
<label>
<span class="component-title">NZBget host:port</span>
<span class="component-title">NZBGet host:port</span>
<span class="component-desc">
<input type="text" name="nzbget_host" id="nzbget_host" value="$sickbeard.NZBGET_HOST" class="form-control input-sm input350">
<p>(e.g. localhost:6789)</p>
<p class="clear-left note">NZBget RPC host name and port number (not NZBgetweb!)</p>
<p class="clear-left note">NZBGet RPC host name and port number (not NZBGetweb!)</p>
</span>
</label>
</div>
<div class="field-pair">
<label>
<span class="component-title">NZBget username</span>
<span class="component-title">NZBGet username</span>
<span class="component-desc">
<input type="text" name="nzbget_username" id="nzbget_username" value="$sickbeard.NZBGET_USERNAME" class="form-control input-sm input200">
<p>locate in nzbget.conf (default:nzbget)</p>
@ -349,7 +349,7 @@
<div class="field-pair">
<label>
<span class="component-title">NZBget password</span>
<span class="component-title">NZBGet password</span>
<span class="component-desc">
<input type="password" autocomplete="nope" name="nzbget_password" id="nzbget_password" value="#echo '*' * len($sickbeard.NZBGET_PASSWORD)#" class="form-control input-sm input200">
<p>locate in nzbget.conf (default:tegbzn6789)</p>
@ -359,7 +359,7 @@
<div class="field-pair">
<label>
<span class="component-title">Use NZBget category</span>
<span class="component-title">Use NZBGet category</span>
<span class="component-desc">
<input type="text" name="nzbget_category" id="nzbget_category" value="$sickbeard.NZBGET_CATEGORY" class="form-control input-sm input200">
<p>send downloads marked this category (e.g. TV)</p>
@ -390,7 +390,7 @@
#end if
<div class="field-pair">
<label>
<span class="component-title">NZBget priority</span>
<span class="component-title">NZBGet priority</span>
<span class="component-desc">
<select name="nzbget_priority" id="nzbget_priority" class="form-control input-sm">
<option value="-100"${prio_verylow}>Very low</option>
@ -404,11 +404,24 @@
</span>
</label>
</div>
<div class="field-pair">
<label>
<span class="component-title">Map an NZBGet "DestDir"</span>
<span class="component-desc">
<input type="text" name="nzbget_parent_map" value="$sickbeard.NZBGET_MAP" placeholder="DestDir=path known locally as" class="form-control input-sm input250">
<p>link a remote path to a local path</p>
<div class="clear-left" style="line-height:initial"><p>optional: where SG is not installed on the NZBGet system (used by process media)<br>
(e.g. d:\tvstuff = c:\tv, /mnt/tv = /tv, /remote/nzbget/DestDir = /locally/known/as/dir)</p></div>
</span>
</label>
</div>
</div>
<div class="test-notification" id="test-nzb-result">Click below to test</div>
<input type="button" value="Test SABnzbd" id="test_sabnzbd" class="btn test-button sabnzbd">
<input type="button" value="Test NZBget" id="test_nzbget" class="btn test-button nzbget">
<input type="button" value="Test NZBGet" id="test_nzbget" class="btn test-button nzbget">
<input type="submit" class="btn config_submitter" value="Save Changes"><br />
</div><!-- /content_use_nzbs //-->

View file

@ -336,7 +336,9 @@
<span class="label addQTip" title="Location#echo (' no longer exists" style="background-color:#8f1515"', '"')[$showLoc[1]]#>$showLoc[0]</span>
<span class="label addQTip" title="Size">$human($get_size($showLoc[0]))</span>
#set $filecount = sum([$c for $k, $c in $ep_counts['videos'].items()])
<span class="label addQTip" title="Videos">#echo ('No', $filecount)[0 < $filecount]# file$maybe_plural($filecount)</span>
#set $to_prune = $show.prune - $filecount
#set $keep_or_prune = ('', ' (%s)' % ('%s to prune' % abs($to_prune), 'keep %s' % $show.prune)[0 <= $to_prune])[bool($show.prune)]
<span class="label addQTip" title="Videos">#echo '%s file%s%s' % (('No', $filecount)[0 < $filecount], $maybe_plural($filecount), $keep_or_prune)#</span>
#if $show.paused
<span class="label label-paused">Paused</span>
#end if

View file

@ -227,6 +227,16 @@
<div id="core-component-group3" class="component-group">
<div class="field-pair">
<label for="prune">
<span class="component-title">Keep up to</span>
<span class="component-desc">
<input type="text" name="prune" id="prune" value="#echo $show.prune and $show.prune or ''#" class="form-control form-control-inline input-sm input75" style="width:50px">
<p>most recent downloads (blank for all)</p>
</span>
</label>
</div>
<div class="field-pair">
<label for="location">
<span class="component-title">Location for files</span>

View file

@ -589,46 +589,80 @@
##
##
<div id="provider-failures">
<div style="padding-bottom:10px">
#for ($check, $check_name, $check_url) in [
('tvdb', 'TVDB Api', 'api.thetvdb.com'), ('thexem', 'The Xem', 'thexem.de'), ('github', 'GitHub', 'github.com'),
]
<div id="check-$check" data-check="check_$check" class="check-site" style="margin-bottom:10px">
<input type="button" class="btn" value="Check $check_name">
<span style="line-height:26px">Test if site is up<span class="result"></span>
<a class="addQTip" style="margin-left:2px;display:none" href="$sickbeard.helpers.anon_url('http://www.isitdownrightnow.com/downorjustme.php?url=' + $check_url)" rel="noreferrer" onclick="window.open(this.href, '_blank'); return !1;" title="View full report for $check_name in new tab"><img alt="[IsItDown]" height="16" width="16" src="$sbRoot/images/iidrn.png" /></a>
</span>
</div>
#end for
</div>
#if not $provider_fails
<p>No current failures. Failure stats display here when appropriate.</p>
<p>No current provider failures. Failure stats display here when appropriate.</p>
#else
<p>When a provider cannot be contacted over a period, SickGear backs off and waits an increasing interval between each retry</p>
#for $prov in $provider_fail_stats
<style>
.component-group{min-height:50px}
.component-group.bubblelist{padding:0;border-bottom:none}
.component-group.bubblelist .item a{font-size:14px;padding-right:14px}
</style>
#set dev = (1, 3)[False]
#if 1 < len([$prov for $prov in $provider_fail_stats * $dev if len($prov['fails'])])
<div class="component-group bubblelist" style="margin:0 0 3px">
<div class="type bgcol">
<span class="list"><div class="item text">Bubble links:</div>
#for $n, $prov in enumerate($provider_fail_stats * $dev)
#if $len($prov['fails'])
<div class="item"><a href="#$prov['prov_id']-section-$n" rel="noreferrer"><img height="16px" src="$sbRoot/images/providers/$prov['prov_img']">$prov['name']</a></div>
#end if
#end for
</span>
</div>
</div>
#end if
<p id="bubble-after">When a provider cannot be contacted over a period, SickGear backs off and waits an increasing interval between each retry</p>
#for $n, $prov in enumerate($provider_fail_stats * $dev)
#if $len($prov['fails'])
<!-- $prov['name'] -->
<div>
<div class="component-group bubble#if $n + 1 == $len($provider_fail_stats * $dev)# last#end if#">
<div name="$prov['prov_id']-section-$n" style="text-align:left">
#set $prov_class = '<span %sstyle="vertical-align:middle">'
#if not $prov['active']
#set $prov_class = $prov_class % 'class="grey-text" '
#else
#set $prov_class = $prov_class % ''
#end if
<input type="button" class="shows-more btn" value="Expand" style="display:none"><input type="button" class="shows-less btn" value="Collapse"><img src="$sbRoot/images/providers/$prov['prov_img']" width="16" height="16" style="margin:0 6px 0 3px">$prov_class$prov['name']
<input type="button" class="shows-more btn" value="Expand" style="display:none"><input type="button" class="shows-less btn" value="Collapse"><img src="$sbRoot/images/providers/$prov['prov_img']" width="16" height="16" style="margin:0 6px 0 3px">$prov_class$prov['name']
#if $prov['active']
#if $prov['next_try']
#set nt = $str($prov['next_try']).split('.', 2)[0][::-1].replace(':', ' m', 1).replace(':', ' h', 1)[::-1]
... is paused until $sbdatetime.sbdatetime.sbftime($sbdatetime.sbdatetime.now() + $prov['next_try'], markup=True) (in ${nt}s) <input type="button" class="provider-retry btn" id="$prov['prov_id']-btn-retry" value="Ignore pause on next search">
... is paused until $sbdatetime.sbdatetime.sbftime($sbdatetime.sbdatetime.now() + $prov['next_try'], markup=True) (in ${nt}s) <input type="button" class="provider-retry btn" id="$prov['prov_id']-btn-retry" value="Ignore pause on next search">
#end if
#else
... is not enabled
... is not enabled
#end if
</span>
</div>
<table class="manageTable provider-failures tablesorter hover-highlight focus-highlight text-center" cellspacing="0" border="0" cellpadding="0">
<thead>
<tr>
<th class="text-center" style="width:13em;padding-right:20px">period of 1hr</th>
<th class="text-center" style="padding-right:20px">server/timeout</th>
<th class="text-center" style="padding-right:20px">network</th>
<th class="text-center" style="padding-right:20px">no data</th>
<th class="text-center" style="padding-right:20px">other</th>
</span>
</div>
<table class="manageTable provider-failures tablesorter hover-highlight focus-highlight text-center" cellspacing="0" border="0" cellpadding="0">
<thead>
<tr>
<th class="text-center" style="width:13em;padding-right:20px">period of 1hr</th>
<th class="text-center" style="padding-right:20px">server/timeout</th>
<th class="text-center" style="padding-right:20px">network</th>
<th class="text-center" style="padding-right:20px">no data</th>
<th class="text-center" style="padding-right:20px">other</th>
#if $prov['has_limit']
<th class="text-center" style="padding-right:20px">hit limit</th>
#end if
</tr>
</thead>
<tbody>
</tr>
</thead>
<tbody>
#set $day = []
#for $fail in $prov['fails']
#set $child = True
@ -637,33 +671,33 @@
#set $child = False
#end if
#slurp#
<tr#if $fail['multirow'] and $child# class="tablesorter-childRow"#end if#>
<tr#if $fail['multirow'] and $child# class="tablesorter-childRow"#end if#>
#if $fail['multirow']
#if not $child
<td><a href="#" class="provider-fail-parent-toggle" title="Totals (expand for detail)">$sbdatetime.sbdatetime.sbfdate($fail['date_time'])</a></td>
<td><a href="#" class="provider-fail-parent-toggle" title="Totals (expand for detail)">$sbdatetime.sbdatetime.sbfdate($fail['date_time'])</a></td>
#else
<td>$sbdatetime.sbdatetime.sbftime($fail['date_time'], markup=True)</td>
<td>$sbdatetime.sbdatetime.sbftime($fail['date_time'], markup=True)</td>
#end if
#else
<td>$sbdatetime.sbdatetime.sbfdatetime($fail['date_time'], markup=True)</td>
<td>$sbdatetime.sbdatetime.sbfdatetime($fail['date_time'], markup=True)</td>
#end if
#set $blank = '-'
#set $title=None
#if $fail['http']['count']
#set $title=$fail['http']['code']
#end if
<td>#if $fail['http']['count']#<span title="#if $child or not $fail['multirow']#$title#else#Expand for fail codes#end if#">$fail['http']['count']</span>#else#$blank#end if# / #echo $fail['timeout'].get('count', 0) or $blank#</td>
<td>#echo ($fail['connection'].get('count', 0) + $fail['connection_timeout'].get('count', 0)) or $blank#</td>
<td>#echo $fail['nodata'].get('count', 0) or $blank#</td>
<td>#echo $fail['other'].get('count', 0) or $blank#</td>
<td>#if $fail['http']['count']#<span title="#if $child or not $fail['multirow']#$title#else#Expand for fail codes#end if#">$fail['http']['count']</span>#else#$blank#end if# / #echo $fail['timeout'].get('count', 0) or $blank#</td>
<td>#echo ($fail['connection'].get('count', 0) + $fail['connection_timeout'].get('count', 0)) or $blank#</td>
<td>#echo $fail['nodata'].get('count', 0) or $blank#</td>
<td>#echo $fail['other'].get('count', 0) or $blank#</td>
#if $prov['has_limit']
<td>#echo $fail.get('limit', {}).get('count', 0) or $blank#</td>
<td>#echo $fail.get('limit', {}).get('count', 0) or $blank#</td>
#end if
</tr>
</tr>
#end for
</tbody>
</table>
<!-- /$prov['name'] -->
</tbody>
</table>
</div><!-- /$prov['name'] -->
#end if
#end for
#end if

View file

@ -61,7 +61,17 @@
#pass
#end try
<div class="field-pair #if $sg_str('SHOWLIST_TAGVIEW') != 'custom' then 'hidden' else ''#" style="margin-top:10px">
<div class="field-pair alt" style="margin-top:10px">
<label for="prune">
<span class="component-title">Keep up to</span>
<span class="component-desc">
<input type="text" name="prune" id="prune" value="" class="form-control form-control-inline input-sm input75" style="width:50px">
<p>most recent downloads (blank for all)</p>
</span>
</label>
</div>
<div class="field-pair #if $sg_str('SHOWLIST_TAGVIEW') != 'custom' then 'hidden' else ''#">
<label for="tag">
<span class="component-title">Place show in group</span>
<span class="component-desc">

View file

@ -169,8 +169,16 @@
</div><br />
</div>
<div class="optionWrapper" >
<span class="selectTitle">Keep up to <span class="grey-text">(0 = no prune)</span></span>
<div class="selectChoices">
<input type="text" name="prune" id="prune" value="#if None is not $prune_value then $prune_value else ''#" class="form-control form-control-inline input-sm input75" style="width:50px;margin-top:0">
<span>blank = no change</span>
</div><br />
</div>
<div class="optionWrapper #if $sg_str('SHOWLIST_TAGVIEW') != 'custom' then 'hidden' else ''#">
<span class="selectTitle">Show is grouped in</span>
<span class="selectTitle">Place show in group</span>
<div class="selectChoices">
<select id="edit_tag" name="tag" class="form-control form-control-inline input-sm">
<option value="keep">&lt; keep &gt;</option>

View file

@ -5,13 +5,6 @@
$(document).ready(function(){
var loading = '<img src="' + sbRoot + '/images/loading16' + themeSpinner + '.gif" height="16" width="16" />';
$('.typelist').on('click', '.list .item a', function(){
$(this).closest('.component-group').after(
$('[name=' + $(this).attr('href').replace('#','') + ']').closest('.component-group')
);
return !1;
});
$('#test-growl').click(function () {
var growlHost = $.trim($('#growl-host').val());
var growlPassword = $.trim($('#growl-password').val());

View file

@ -186,6 +186,49 @@ $(document).ready(function() {
$.SickGear.sumChecked();
});
function updown(data){
var result = ': <span class="grey-text">failed to test site, oh the irony!</span>';
if(!(/undefined/i.test(data))) {
// noinspection JSUnresolvedVariable
var resp = data.last_down;
if (!(/undefined/i.test(resp))) {
result = ': <span class="grey-text"> yes it\'s <span class="box-green">up</span> and was last down ' + resp + ' ago</span>';
} else {
// noinspection JSUnresolvedVariable
resp = data.down_for;
if (!(/undefined/i.test(resp))) {
result = ': <span class="red-text">no, it\'s been <span class="box-red">down</span> for ~' + resp + '</span>';
}
}
}
return result;
}
function check_site(clicked){
var that = $(clicked), el$=$(that.parent());
that.attr('disabled', !0);
$.ajax({
url: $.SickGear.Root + '/history/check_site/?site_name=' + el$.attr('data-check'),
type: 'GET',
dataType: 'json',
complete: function (data) {
// noinspection JSUnresolvedVariable
el$.find('.result').html(updown(data.responseJSON));
el$.find('a').show();
that.attr('disabled', !1);
}
});
}
$.each(['tvdb', 'thexem', 'github'], function(i, el_id){
$('#check-' + el_id).find('input').click(function(){
check_site(this);
});
});
$('.shows-less').click(function(){
var table$ = $(this).nextAll('table:first');
table$ = table$.length ? table$ : $(this).parent().nextAll('table:first');

View file

@ -42,4 +42,17 @@ $(document).ready(function(){
$('#NAV' + topmenu).addClass('active');
$('.dropdown-toggle').dropdownHover();
(/undefined/i.test(document.createElement('input').placeholder)) && $('body').addClass('no-placeholders');
$('.bubblelist').on('click', '.list .item a', function(){
var bubbleAfter$ = $('#bubble-after'),
lastBubble$ = $('.bubble.last'), toBubble = $(this).attr('href').replace('#', ''),
doLast = (lastBubble$.length && toBubble === lastBubble$.find('div[name*="section"]').attr('name'));
doLast && lastBubble$.removeClass('last');
(bubbleAfter$.length && bubbleAfter$ || $(this).closest('.component-group')).after(
$('[name=' + $(this).attr('href').replace('#','') + ']').closest('.component-group')
);
doLast && $('.bubble').last().addClass('last');
return !1;
});
});

View file

@ -3,6 +3,7 @@ import logging
import random
import re
from requests.sessions import Session
from requests.models import Response
import js2py
from copy import deepcopy
@ -40,7 +41,8 @@ class CloudflareScraper(Session):
resp = super(CloudflareScraper, self).request(method, url, *args, **kwargs)
# Check if Cloudflare anti-bot is on
if (503 == resp.status_code
if (isinstance(resp, type(Response())) and isinstance(resp.headers.get('Server'), basestring)
and 503 == resp.status_code
and re.search('(?i)cloudflare', resp.headers.get('Server'))
and b'jschl_vc' in resp.content
and b'jschl_answer' in resp.content):

View file

@ -47,6 +47,7 @@ import threading
import time
import traceback
import math
import random
from tornado.concurrent import Future, is_future, chain_future, future_set_exc_info, future_add_done_callback # noqa: E501
from tornado.log import app_log, gen_log
@ -1161,6 +1162,14 @@ class PeriodicCallback(object):
Note that the timeout is given in milliseconds, while most other
time-related functions in Tornado use seconds.
If ``jitter`` is specified, each callback time will be randomly selected
within a window of ``jitter * callback_time`` milliseconds.
Jitter can be used to reduce alignment of events with similar periods.
A jitter of 0.1 means allowing a 10% variation in callback time.
The window is centered on ``callback_time`` so the total number of calls
within a given interval should not be significantly affected by adding
jitter.
If the callback runs for longer than ``callback_time`` milliseconds,
subsequent invocations will be skipped to get back on schedule.
@ -1168,12 +1177,16 @@ class PeriodicCallback(object):
.. versionchanged:: 5.0
The ``io_loop`` argument (deprecated since version 4.1) has been removed.
.. versionchanged:: 5.1
The ``jitter`` argument is added.
"""
def __init__(self, callback, callback_time):
def __init__(self, callback, callback_time, jitter=0):
self.callback = callback
if callback_time <= 0:
raise ValueError("Periodic callback must have a positive callback_time")
self.callback_time = callback_time
self.jitter = jitter
self._running = False
self._timeout = None
@ -1218,6 +1231,9 @@ class PeriodicCallback(object):
def _update_next(self, current_time):
callback_time_sec = self.callback_time / 1000.0
if self.jitter:
# apply jitter fraction
callback_time_sec *= 1 + (self.jitter * (random.random() - 0.5))
if self._next_timeout <= current_time:
# The period should be measured from the start of one call
# to the start of the next. If one call takes too long,

View file

@ -2825,6 +2825,7 @@ class FallbackHandler(RequestHandler):
def prepare(self):
self.fallback(self.request)
self._finished = True
self.on_finish()
class OutputTransform(object):

View file

@ -19,7 +19,6 @@ the protocol (known as "draft 76") and are not compatible with this module.
from __future__ import absolute_import, division, print_function
import base64
import collections
import hashlib
import os
import struct
@ -34,6 +33,7 @@ from tornado.ioloop import IOLoop, PeriodicCallback
from tornado.iostream import StreamClosedError
from tornado.log import gen_log, app_log
from tornado import simple_httpclient
from tornado.queues import Queue
from tornado.tcpclient import TCPClient
from tornado.util import _websocket_mask, PY3
@ -1096,8 +1096,7 @@ class WebSocketClientConnection(simple_httpclient._HTTPConnection):
self.compression_options = compression_options
self.connect_future = Future()
self.protocol = None
self.read_future = None
self.read_queue = collections.deque()
self.read_queue = Queue(1)
self.key = base64.b64encode(os.urandom(16))
self._on_message_callback = on_message_callback
self.close_code = self.close_reason = None
@ -1207,12 +1206,8 @@ class WebSocketClientConnection(simple_httpclient._HTTPConnection):
is given it will be called with the future when it is
ready.
"""
assert self.read_future is None
future = Future()
if self.read_queue:
future_set_result_unless_cancelled(future, self.read_queue.popleft())
else:
self.read_future = future
future = self.read_queue.get()
if callback is not None:
self.io_loop.add_future(future, callback)
return future
@ -1220,11 +1215,8 @@ class WebSocketClientConnection(simple_httpclient._HTTPConnection):
def on_message(self, message):
if self._on_message_callback:
self._on_message_callback(message)
elif self.read_future is not None:
future_set_result_unless_cancelled(self.read_future, message)
self.read_future = None
else:
self.read_queue.append(message)
return self.read_queue.put(message)
def ping(self, data=b''):
"""Send ping frame to the remote end.

View file

@ -122,6 +122,7 @@ class Show(dict):
def __init__(self):
dict.__init__(self)
self.data = {}
self.ep_loaded = False
def __repr__(self):
return '<Show %r (containing %s seasons)>' % (self.data.get(u'seriesname', 'instance'), len(self))
@ -886,6 +887,9 @@ class Tvdb:
for k, v in show_data['data'].iteritems():
self._set_show_data(sid, k, v)
if sid in self.shows:
self.shows[sid].ep_loaded = get_ep_info
p = ''
if self.config['posters_enabled']:
poster_data = self._getetsrc(self.config['url_seriesBanner'] % (sid, 'poster'), language=language)
@ -1022,7 +1026,7 @@ class Tvdb:
if isinstance(key, (int, long)):
# Item is integer, treat as show id
if key not in self.shows:
if key not in self.shows or (not self.shows[key].ep_loaded and arg in (None, True)):
self._get_show_data(key, self.config['language'], (True, arg)[arg is not None])
return None if key not in self.shows else self.shows[key]

View file

@ -44,6 +44,7 @@ from sickbeard.common import SD, SKIPPED
from sickbeard.databases import mainDB, cache_db, failed_db
from sickbeard.exceptions import ex
from sickbeard.providers.generic import GenericProvider
from sickbeard.providers.newznab import NewznabConstants
from sickbeard.watchedstate import EmbyWatchedStateUpdater, PlexWatchedStateUpdater
from indexers.indexer_config import INDEXER_TVDB
from indexers.indexer_api import indexerApi
@ -267,6 +268,7 @@ NZBGET_HOST = None
NZBGET_USE_HTTPS = False
NZBGET_PRIORITY = 100
NZBGET_SCRIPT_VERSION = None
NZBGET_MAP = None
SAB_USERNAME = None
SAB_PASSWORD = None
@ -278,7 +280,7 @@ TORRENT_USERNAME = None
TORRENT_PASSWORD = None
TORRENT_HOST = ''
TORRENT_PATH = ''
TORRENT_SEED_TIME = None
TORRENT_SEED_TIME = 0
TORRENT_PAUSED = False
TORRENT_HIGH_BANDWIDTH = False
TORRENT_LABEL = ''
@ -290,7 +292,7 @@ EMBY_PARENT_MAPS = None
EMBY_HOST = None
EMBY_APIKEY = None
EMBY_WATCHEDSTATE_SCHEDULED = False
EMBY_WATCHEDSTATE_FREQUENCY = None
EMBY_WATCHEDSTATE_FREQUENCY = 0
USE_KODI = False
KODI_ALWAYS_ON = True
@ -316,7 +318,7 @@ PLEX_HOST = None
PLEX_USERNAME = None
PLEX_PASSWORD = None
PLEX_WATCHEDSTATE_SCHEDULED = False
PLEX_WATCHEDSTATE_FREQUENCY = None
PLEX_WATCHEDSTATE_FREQUENCY = 0
USE_XBMC = False
XBMC_ALWAYS_ON = True
@ -429,7 +431,7 @@ DISCORDAPP_NOTIFY_ONSUBTITLEDOWNLOAD = False
DISCORDAPP_AS_AUTHED = False
DISCORDAPP_USERNAME = None
DISCORDAPP_ICON_URL = None
DISCORDAPP_AS_TTS = None
DISCORDAPP_AS_TTS = 0
DISCORDAPP_ACCESS_TOKEN = None
USE_GITTER = False
@ -448,7 +450,7 @@ TWITTER_PASSWORD = None
TWITTER_PREFIX = None
USE_EMAIL = False
EMAIL_OLD_SUBJECTS = None
EMAIL_OLD_SUBJECTS = False
EMAIL_NOTIFY_ONSNATCH = False
EMAIL_NOTIFY_ONDOWNLOAD = False
EMAIL_NOTIFY_ONSUBTITLEDOWNLOAD = False
@ -614,7 +616,7 @@ def initialize(console_logging=True):
# Search Settings/NZB search
global USE_NZBS, NZB_METHOD, NZB_DIR, SAB_HOST, SAB_USERNAME, SAB_PASSWORD, SAB_APIKEY, SAB_CATEGORY, \
NZBGET_USE_HTTPS, NZBGET_HOST, NZBGET_USERNAME, NZBGET_PASSWORD, NZBGET_CATEGORY, NZBGET_PRIORITY, \
NZBGET_SCRIPT_VERSION
NZBGET_SCRIPT_VERSION, NZBGET_MAP
# Search Settings/Torrent search
global USE_TORRENTS, TORRENT_METHOD, TORRENT_DIR, TORRENT_HOST, TORRENT_USERNAME, TORRENT_PASSWORD, \
TORRENT_LABEL, TORRENT_PATH, TORRENT_SEED_TIME, TORRENT_PAUSED, TORRENT_HIGH_BANDWIDTH, TORRENT_VERIFY_CERT
@ -689,7 +691,7 @@ def initialize(console_logging=True):
if __INITIALIZED__:
return False
for stanza in ('General', 'Blackhole', 'SABnzbd', 'NZBget', 'Emby', 'Kodi', 'XBMC', 'PLEX',
for stanza in ('General', 'Blackhole', 'SABnzbd', 'NZBGet', 'Emby', 'Kodi', 'XBMC', 'PLEX',
'Growl', 'Prowl', 'Twitter', 'Slack', 'Discordapp', 'Boxcar2', 'NMJ', 'NMJv2',
'Synology', 'SynologyNotifier',
'pyTivo', 'Pushalot', 'Pushbullet', 'Subtitles'):
@ -917,12 +919,21 @@ def initialize(console_logging=True):
SAB_CATEGORY = check_setting_str(CFG, 'SABnzbd', 'sab_category', 'tv')
SAB_HOST = check_setting_str(CFG, 'SABnzbd', 'sab_host', '')
NZBGET_USERNAME = check_setting_str(CFG, 'NZBget', 'nzbget_username', 'nzbget')
NZBGET_PASSWORD = check_setting_str(CFG, 'NZBget', 'nzbget_password', 'tegbzn6789')
NZBGET_CATEGORY = check_setting_str(CFG, 'NZBget', 'nzbget_category', 'tv')
NZBGET_HOST = check_setting_str(CFG, 'NZBget', 'nzbget_host', '')
NZBGET_USE_HTTPS = bool(check_setting_int(CFG, 'NZBget', 'nzbget_use_https', 0))
NZBGET_PRIORITY = check_setting_int(CFG, 'NZBget', 'nzbget_priority', 100)
# first check using official name case, then with case of legacy
# todo: migrate config, (just not atm due to testing map feature)
NZBGET_USERNAME = (check_setting_str(CFG, 'NZBGet', 'nzbget_username', '')
or check_setting_str(CFG, 'NZBget', 'nzbget_username', 'nzbget'))
NZBGET_PASSWORD = (check_setting_str(CFG, 'NZBGet', 'nzbget_password', '')
or check_setting_str(CFG, 'NZBget', 'nzbget_password', 'tegbzn6789'))
NZBGET_CATEGORY = (check_setting_str(CFG, 'NZBGet', 'nzbget_category', '')
or check_setting_str(CFG, 'NZBget', 'nzbget_category', 'tv'))
NZBGET_HOST = (check_setting_str(CFG, 'NZBGet', 'nzbget_host', '')
or check_setting_str(CFG, 'NZBget', 'nzbget_host', ''))
NZBGET_USE_HTTPS = (bool(check_setting_int(CFG, 'NZBGet', 'nzbget_use_https', 0))
or bool(check_setting_int(CFG, 'NZBget', 'nzbget_use_https', 0)))
NZBGET_PRIORITY = (check_setting_int(CFG, 'NZBGet', 'nzbget_priority', 0)
or check_setting_int(CFG, 'NZBget', 'nzbget_priority', 100))
NZBGET_MAP = check_setting_str(CFG, 'NZBGet', 'nzbget_map', '')
try:
ng_script_file = ek.ek(os.path.join, ek.ek(os.path.dirname, ek.ek(os.path.dirname, __file__)),
@ -1218,83 +1229,79 @@ def initialize(console_logging=True):
if GenericProvider.TORRENT == curProvider.providerType]:
prov_id = torrent_prov.get_id()
prov_id_uc = torrent_prov.get_id().upper()
torrent_prov.enabled = bool(check_setting_int(CFG, prov_id_uc, prov_id, 0))
torrent_prov.enabled = bool(check_setting_int(CFG, prov_id_uc, prov_id, False))
# check str with a def of list, don't add to block settings
if getattr(torrent_prov, 'url_edit', None):
torrent_prov.url_home = check_setting_str(CFG, prov_id_uc, prov_id + '_url_home', [])
if hasattr(torrent_prov, 'api_key'):
torrent_prov.api_key = check_setting_str(CFG, prov_id_uc, prov_id + '_api_key', '')
if hasattr(torrent_prov, 'hash'):
torrent_prov.hash = check_setting_str(CFG, prov_id_uc, prov_id + '_hash', '')
if hasattr(torrent_prov, 'digest'):
torrent_prov.digest = check_setting_str(CFG, prov_id_uc, prov_id + '_digest', '')
for user_type in ['username', 'uid']:
if hasattr(torrent_prov, user_type):
setattr(torrent_prov, user_type,
check_setting_str(CFG, prov_id_uc, '%s_%s' % (prov_id, user_type), ''))
if hasattr(torrent_prov, 'password'):
torrent_prov.password = check_setting_str(CFG, prov_id_uc, prov_id + '_password', '')
if hasattr(torrent_prov, 'passkey'):
torrent_prov.passkey = check_setting_str(CFG, prov_id_uc, prov_id + '_passkey', '')
if hasattr(torrent_prov, 'confirmed'):
torrent_prov.confirmed = bool(check_setting_int(CFG, prov_id_uc, prov_id + '_confirmed', 0))
if hasattr(torrent_prov, 'options'):
torrent_prov.options = check_setting_str(CFG, prov_id_uc, prov_id + '_options', '')
if hasattr(torrent_prov, '_seed_ratio'):
torrent_prov._seed_ratio = check_setting_str(CFG, prov_id_uc, prov_id + '_seed_ratio', '')
if hasattr(torrent_prov, 'seed_time'):
torrent_prov.seed_time = check_setting_int(CFG, prov_id_uc, prov_id + '_seed_time', '')
if hasattr(torrent_prov, 'minseed'):
torrent_prov.minseed = check_setting_int(CFG, prov_id_uc, prov_id + '_minseed', 0)
if hasattr(torrent_prov, 'minleech'):
torrent_prov.minleech = check_setting_int(CFG, prov_id_uc, prov_id + '_minleech', 0)
if hasattr(torrent_prov, 'freeleech'):
torrent_prov.freeleech = bool(check_setting_int(CFG, prov_id_uc, prov_id + '_freeleech', 0))
if hasattr(torrent_prov, 'reject_m2ts'):
torrent_prov.reject_m2ts = bool(check_setting_int(CFG, prov_id_uc, prov_id + '_reject_m2ts', 0))
if hasattr(torrent_prov, 'enable_recentsearch'):
torrent_prov.enable_recentsearch = bool(check_setting_int(CFG, prov_id_uc,
prov_id + '_enable_recentsearch', 1)) or \
not getattr(torrent_prov, 'supports_backlog')
if hasattr(torrent_prov, 'enable_backlog'):
torrent_prov.enable_backlog = bool(check_setting_int(CFG, prov_id_uc, prov_id + '_enable_backlog', 1))
if hasattr(torrent_prov, 'enable_scheduled_backlog'):
torrent_prov.enable_scheduled_backlog = bool(check_setting_int(
CFG, prov_id_uc, prov_id + '_enable_scheduled_backlog', 1))
if hasattr(torrent_prov, 'search_mode'):
torrent_prov.search_mode = check_setting_str(CFG, prov_id_uc, prov_id + '_search_mode', 'eponly')
if hasattr(torrent_prov, 'search_fallback'):
torrent_prov.search_fallback = bool(check_setting_int(CFG, prov_id_uc, prov_id + '_search_fallback', 0))
# check int with a default of str, don't add to block settings
attr = 'seed_time'
if hasattr(torrent_prov, attr):
torrent_prov.seed_time = check_setting_int(CFG, prov_id_uc, '%s_%s' % (prov_id, attr), '')
# custom cond, don't add to block settings
attr = 'enable_recentsearch'
if hasattr(torrent_prov, attr):
torrent_prov.enable_recentsearch = bool(check_setting_int(
CFG, prov_id_uc, '%s_%s' % (prov_id, attr), True)) or not getattr(torrent_prov, 'supports_backlog')
# check str with a default of list, don't add to block settings
if hasattr(torrent_prov, 'filter'):
torrent_prov.filter = check_setting_str(CFG, prov_id_uc, prov_id + '_filter', [])
for (attr, default) in [
('enable_backlog', True), ('enable_scheduled_backlog', True),
('api_key', ''), ('hash', ''), ('digest', ''),
('username', ''), ('uid', ''), ('password', ''), ('passkey', ''),
('options', ''),
('_seed_ratio', ''), ('minseed', 0), ('minleech', 0),
('scene_only', False), ('scene_or_contain', ''), ('scene_loose', False), ('scene_loose_active', False),
('scene_rej_nuked', False), ('scene_nuked_active', False),
('freeleech', False), ('confirmed', False), ('reject_m2ts', False),
('search_mode', 'eponly'), ('search_fallback', False)
]:
if hasattr(torrent_prov, attr):
attr_check = '%s_%s' % (prov_id, attr.strip('_'))
if isinstance(default, bool):
setattr(torrent_prov, attr, bool(check_setting_int(CFG, prov_id_uc, attr_check, default)))
elif isinstance(default, basestring):
setattr(torrent_prov, attr, check_setting_str(CFG, prov_id_uc, attr_check, default))
elif isinstance(default, int):
setattr(torrent_prov, attr, check_setting_int(CFG, prov_id_uc, attr_check, default))
for nzb_prov in [curProvider for curProvider in providers.sortedProviderList()
if GenericProvider.NZB == curProvider.providerType]:
prov_id = nzb_prov.get_id()
prov_id_uc = nzb_prov.get_id().upper()
nzb_prov.enabled = bool(
check_setting_int(CFG, prov_id_uc, prov_id, 0))
if hasattr(nzb_prov, 'api_key'):
nzb_prov.api_key = check_setting_str(CFG, prov_id_uc, prov_id + '_api_key', '')
if hasattr(nzb_prov, 'username'):
nzb_prov.username = check_setting_str(CFG, prov_id_uc, prov_id + '_username', '')
if hasattr(nzb_prov, 'search_mode'):
nzb_prov.search_mode = check_setting_str(CFG, prov_id_uc, prov_id + '_search_mode', 'eponly')
if hasattr(nzb_prov, 'search_fallback'):
nzb_prov.search_fallback = bool(check_setting_int(CFG, prov_id_uc, prov_id + '_search_fallback', 0))
if hasattr(nzb_prov, 'enable_recentsearch'):
nzb_prov.enable_recentsearch = bool(check_setting_int(CFG, prov_id_uc,
prov_id + '_enable_recentsearch', 1)) or \
not getattr(nzb_prov, 'supports_backlog')
if hasattr(nzb_prov, 'enable_backlog'):
nzb_prov.enable_backlog = bool(check_setting_int(CFG, prov_id_uc, prov_id + '_enable_backlog', 1))
if hasattr(nzb_prov, 'enable_scheduled_backlog'):
nzb_prov.enable_scheduled_backlog = bool(check_setting_int(
CFG, prov_id_uc, prov_id + '_enable_scheduled_backlog', 1))
nzb_prov.enabled = bool(check_setting_int(CFG, prov_id_uc, prov_id, False))
attr = 'enable_recentsearch'
if hasattr(nzb_prov, attr):
nzb_prov.enable_recentsearch = bool(check_setting_int(
CFG, prov_id_uc, '%s_%s' % (prov_id, attr), True)) or not getattr(nzb_prov, 'supports_backlog')
for (attr, default) in [
('enable_backlog', True), ('enable_scheduled_backlog', True),
('api_key', ''), ('username', ''),
('scene_only', False), ('scene_or_contain', ''), ('scene_loose', False), ('scene_loose_active', False),
('scene_rej_nuked', False), ('scene_nuked_active', False),
('search_mode', 'eponly'), ('search_fallback', False), ('server_type', NewznabConstants.SERVER_DEFAULT)
]:
if hasattr(nzb_prov, attr):
attr_check = '%s_%s' % (prov_id, attr.strip('_'))
if isinstance(default, bool):
setattr(nzb_prov, attr, bool(check_setting_int(CFG, prov_id_uc, attr_check, default)))
elif isinstance(default, basestring):
setattr(nzb_prov, attr, check_setting_str(CFG, prov_id_uc, attr_check, default))
elif isinstance(default, int):
setattr(nzb_prov, attr, check_setting_int(CFG, prov_id_uc, attr_check, default))
if not os.path.isfile(CONFIG_FILE):
logger.log(u'Unable to find \'%s\', all settings will be default!' % CONFIG_FILE, logger.DEBUG)
save_config()
elif update_config:
update_config = True
if update_config:
save_config()
# start up all the threads
@ -1700,280 +1707,302 @@ def save_config():
new_config['General']['require_words'] = REQUIRE_WORDS
new_config['General']['calendar_unprotected'] = int(CALENDAR_UNPROTECTED)
new_config['Blackhole'] = {}
new_config['Blackhole']['nzb_dir'] = NZB_DIR
new_config['Blackhole']['torrent_dir'] = TORRENT_DIR
for src in [x for x in providers.sortedProviderList() if GenericProvider.TORRENT == x.providerType]:
src_id = src.get_id()
src_id_uc = src_id.upper()
new_config[src_id_uc] = {}
new_config[src_id_uc][src_id] = int(src.enabled)
if int(src.enabled):
new_config[src_id_uc][src_id] = int(src.enabled)
if getattr(src, 'url_edit', None):
new_config[src_id_uc][src_id + '_url_home'] = src.url_home
if hasattr(src, 'password'):
if getattr(src, 'password', None):
new_config[src_id_uc][src_id + '_password'] = helpers.encrypt(src.password, ENCRYPTION_VERSION)
for (setting, value) in [
('%s_%s' % (src_id, k), getattr(src, k, v) if not v else helpers.tryInt(getattr(src, k, None)))
for (attr, value) in [
(k, getattr(src, k, v) if not v else helpers.tryInt(getattr(src, k, None)))
for (k, v) in [
('enable_recentsearch', 1), ('enable_backlog', 1), ('enable_scheduled_backlog', 1),
('api_key', None), ('passkey', None), ('digest', None), ('hash', None), ('username', ''), ('uid', ''),
('minseed', 1), ('minleech', 1), ('confirmed', 1), ('freeleech', 1), ('reject_m2ts', 1),
('enable_recentsearch', 1), ('enable_backlog', 1), ('search_mode', None), ('search_fallback', 1),
('seed_time', None), ('enable_scheduled_backlog', 1)] if hasattr(src, k)]:
new_config[src_id_uc][setting] = value
('minseed', 1), ('minleech', 1), ('seed_time', None),
('confirmed', 1), ('freeleech', 1), ('reject_m2ts', 1),
('scene_only', None), ('scene_or_contain', ''), ('scene_loose', None), ('scene_loose_active', None),
('scene_rej_nuked', None), ('scene_nuked_active', None),
('search_mode', None), ('search_fallback', 1)
]
if hasattr(src, k)]:
if (value and not ('search_mode' == attr and 'eponly' == value)
# must allow the following to save '0' not '1' because default is enable (1) instead of disable (0)
and (attr not in ('enable_recentsearch', 'enable_backlog', 'enable_scheduled_backlog'))
or not value and (attr in ('enable_recentsearch', 'enable_backlog', 'enable_scheduled_backlog'))):
new_config[src_id_uc]['%s_%s' % (src_id, attr)] = value
if hasattr(src, '_seed_ratio'):
if getattr(src, '_seed_ratio', None):
new_config[src_id_uc][src_id + '_seed_ratio'] = src.seed_ratio()
if hasattr(src, 'filter'):
if getattr(src, 'filter', None):
new_config[src_id_uc][src_id + '_filter'] = src.filter
if not new_config[src_id_uc]:
del new_config[src_id_uc]
for src in [x for x in providers.sortedProviderList() if GenericProvider.NZB == x.providerType]:
src_id = src.get_id()
src_id_uc = src.get_id().upper()
new_config[src_id_uc] = {}
new_config[src_id_uc][src_id] = int(src.enabled)
if int(src.enabled):
new_config[src_id_uc][src_id] = int(src.enabled)
for attr in [x for x in ['api_key', 'username', 'search_mode'] if hasattr(src, x)]:
new_config[src_id_uc]['%s_%s' % (src_id, attr)] = getattr(src, attr)
for attr in [x for x in ['api_key', 'username', 'search_mode'] if getattr(src, x, None)]:
if 'search_mode' != attr or 'eponly' != getattr(src, attr):
new_config[src_id_uc]['%s_%s' % (src_id, attr)] = getattr(src, attr)
for attr in [x for x in ['enable_recentsearch', 'enable_backlog', 'search_fallback',
'enable_scheduled_backlog'] if hasattr(src, x)]:
new_config[src_id_uc]['%s_%s' % (src_id, attr)] = helpers.tryInt(getattr(src, attr, None))
for attr in [x for x in ['enable_recentsearch', 'enable_backlog', 'enable_scheduled_backlog',
'scene_only', 'scene_loose', 'scene_loose_active',
'scene_rej_nuked', 'scene_nuked_active',
'search_fallback', 'server_type']
if getattr(src, x, None)]:
value = helpers.tryInt(getattr(src, attr, None))
# must allow the following to save '0' not '1' because default is enable (1) instead of disable (0)
if (value and (attr not in ('enable_recentsearch', 'enable_backlog', 'enable_scheduled_backlog'))
or not value and (attr in ('enable_recentsearch', 'enable_backlog', 'enable_scheduled_backlog'))):
new_config[src_id_uc]['%s_%s' % (src_id, attr)] = value
new_config['SABnzbd'] = {}
new_config['SABnzbd']['sab_username'] = SAB_USERNAME
new_config['SABnzbd']['sab_password'] = helpers.encrypt(SAB_PASSWORD, ENCRYPTION_VERSION)
new_config['SABnzbd']['sab_apikey'] = SAB_APIKEY
new_config['SABnzbd']['sab_category'] = SAB_CATEGORY
new_config['SABnzbd']['sab_host'] = SAB_HOST
attr = 'scene_or_contain'
if getattr(src, attr, None):
new_config[src_id_uc]['%s_%s' % (src_id, attr)] = getattr(src, attr, '')
new_config['NZBget'] = {}
if not new_config[src_id_uc]:
del new_config[src_id_uc]
new_config['NZBget']['nzbget_username'] = NZBGET_USERNAME
new_config['NZBget']['nzbget_password'] = helpers.encrypt(NZBGET_PASSWORD, ENCRYPTION_VERSION)
new_config['NZBget']['nzbget_category'] = NZBGET_CATEGORY
new_config['NZBget']['nzbget_host'] = NZBGET_HOST
new_config['NZBget']['nzbget_use_https'] = int(NZBGET_USE_HTTPS)
new_config['NZBget']['nzbget_priority'] = NZBGET_PRIORITY
from collections import OrderedDict
cfg_keys = []
for (cfg, items) in OrderedDict([
# -----------------------------------
# Config/Search
# -----------------------------------
('Blackhole', [
('nzb_dir', NZB_DIR), ('torrent_dir', TORRENT_DIR)]),
('NZBGet', [
('username', NZBGET_USERNAME), ('password', helpers.encrypt(NZBGET_PASSWORD, ENCRYPTION_VERSION)),
('host', NZBGET_HOST),
('category', NZBGET_CATEGORY),
('use_https', int(NZBGET_USE_HTTPS)),
('priority', NZBGET_PRIORITY),
('map', NZBGET_MAP),
]),
('SABnzbd', [
('username', SAB_USERNAME), ('password', helpers.encrypt(SAB_PASSWORD, ENCRYPTION_VERSION)),
('apikey', SAB_APIKEY),
('host', SAB_HOST),
('category', SAB_CATEGORY),
]),
('TORRENT', [
('username', TORRENT_USERNAME), ('password', helpers.encrypt(TORRENT_PASSWORD, ENCRYPTION_VERSION)),
('host', TORRENT_HOST),
('path', TORRENT_PATH),
('seed_time', int(TORRENT_SEED_TIME)),
('paused', int(TORRENT_PAUSED)),
('high_bandwidth', int(TORRENT_HIGH_BANDWIDTH)),
('label', TORRENT_LABEL),
('verify_cert', int(TORRENT_VERIFY_CERT)),
]),
# -----------------------------------
# Config/Notifications
# -----------------------------------
('Emby', [
('use_%s', int(USE_EMBY)),
('apikey', EMBY_APIKEY), ('host', EMBY_HOST),
('update_library', int(EMBY_UPDATE_LIBRARY)),
('watchedstate_scheduled', int(EMBY_WATCHEDSTATE_SCHEDULED)),
('watchedstate_frequency', int(EMBY_WATCHEDSTATE_FREQUENCY)),
('parent_maps', EMBY_PARENT_MAPS),
]),
('Kodi', [
('use_%s', int(USE_KODI)),
('username', KODI_USERNAME), ('password', helpers.encrypt(KODI_PASSWORD, ENCRYPTION_VERSION)),
('host', KODI_HOST),
('always_on', int(KODI_ALWAYS_ON)), ('update_library', int(KODI_UPDATE_LIBRARY)),
('update_full', int(KODI_UPDATE_FULL)),
('update_onlyfirst', int(KODI_UPDATE_ONLYFIRST)),
('parent_maps', KODI_PARENT_MAPS),
]),
('Plex', [
('use_%s', int(USE_PLEX)),
('username', PLEX_USERNAME), ('password', helpers.encrypt(PLEX_PASSWORD, ENCRYPTION_VERSION)),
('host', PLEX_HOST),
('update_library', int(PLEX_UPDATE_LIBRARY)),
('watchedstate_scheduled', int(PLEX_WATCHEDSTATE_SCHEDULED)),
('watchedstate_frequency', int(PLEX_WATCHEDSTATE_FREQUENCY)),
('parent_maps', PLEX_PARENT_MAPS),
('server_host', PLEX_SERVER_HOST),
]),
('XBMC', [
('use_%s', int(USE_XBMC)),
('username', XBMC_USERNAME), ('password', helpers.encrypt(XBMC_PASSWORD, ENCRYPTION_VERSION)),
('host', XBMC_HOST),
('always_on', int(XBMC_ALWAYS_ON)), ('update_library', int(XBMC_UPDATE_LIBRARY)),
('update_full', int(XBMC_UPDATE_FULL)),
('update_onlyfirst', int(XBMC_UPDATE_ONLYFIRST)),
]),
('NMJ', [
('use_%s', int(USE_NMJ)),
('host', NMJ_HOST),
('database', NMJ_DATABASE),
('mount', NMJ_MOUNT),
]),
('NMJv2', [
('use_%s', int(USE_NMJv2)),
('host', NMJv2_HOST),
('database', NMJv2_DATABASE),
('dbloc', NMJv2_DBLOC),
]),
('Synology', [
('use_synoindex', int(USE_SYNOINDEX)),
]),
('SynologyNotifier', [
('use_%s', int(USE_SYNOLOGYNOTIFIER)),
]),
('pyTivo', [
('use_%s', int(USE_PYTIVO)),
('host', PYTIVO_HOST),
('share_name', PYTIVO_SHARE_NAME),
('tivo_name', PYTIVO_TIVO_NAME),
]),
('Boxcar2', [
('use_%s', int(USE_BOXCAR2)),
('accesstoken', BOXCAR2_ACCESSTOKEN),
('sound', BOXCAR2_SOUND if 'default' != BOXCAR2_SOUND else None),
]),
('Pushbullet', [
('use_%s', int(USE_PUSHBULLET)),
('access_token', PUSHBULLET_ACCESS_TOKEN),
('device_iden', PUSHBULLET_DEVICE_IDEN),
]),
('Pushover', [
('use_%s', int(USE_PUSHOVER)),
('userkey', PUSHOVER_USERKEY),
('apikey', PUSHOVER_APIKEY),
('priority', PUSHOVER_PRIORITY if '0' != PUSHOVER_PRIORITY else None),
('device', PUSHOVER_DEVICE if 'all' != PUSHOVER_DEVICE else None),
('sound', PUSHOVER_SOUND if 'pushover' != PUSHOVER_SOUND else None),
]),
('Growl', [
('use_%s', int(USE_GROWL)),
('host', GROWL_HOST),
('password', helpers.encrypt(GROWL_PASSWORD, ENCRYPTION_VERSION)),
]),
('Prowl', [
('use_%s', int(USE_PROWL)),
('api', PROWL_API),
('priority', PROWL_PRIORITY if '0' != PROWL_PRIORITY else None),
]),
('Libnotify', [
('use_%s', int(USE_LIBNOTIFY))
]),
# deprecated service
# new_config['Pushalot'] = {}
# new_config['Pushalot']['use_pushalot'] = int(USE_PUSHALOT)
# new_config['Pushalot']['pushalot_authorizationtoken'] = PUSHALOT_AUTHORIZATIONTOKEN
('Trakt', [
('use_%s', int(USE_TRAKT)),
('update_collection', TRAKT_UPDATE_COLLECTION and trakt_helpers.build_config_string(TRAKT_UPDATE_COLLECTION)),
('accounts', TraktAPI.build_config_string(TRAKT_ACCOUNTS)),
('mru', TRAKT_MRU),
# new_config['Trakt'] = {}
# new_config['Trakt']['trakt_remove_watchlist'] = int(TRAKT_REMOVE_WATCHLIST)
# new_config['Trakt']['trakt_remove_serieslist'] = int(TRAKT_REMOVE_SERIESLIST)
# new_config['Trakt']['trakt_use_watchlist'] = int(TRAKT_USE_WATCHLIST)
# new_config['Trakt']['trakt_method_add'] = int(TRAKT_METHOD_ADD)
# new_config['Trakt']['trakt_start_paused'] = int(TRAKT_START_PAUSED)
# new_config['Trakt']['trakt_sync'] = int(TRAKT_SYNC)
# new_config['Trakt']['trakt_default_indexer'] = int(TRAKT_DEFAULT_INDEXER)
]),
('Slack', [
('use_%s', int(USE_SLACK)),
('channel', SLACK_CHANNEL),
('as_authed', int(SLACK_AS_AUTHED)),
('bot_name', SLACK_BOT_NAME),
('icon_url', SLACK_ICON_URL),
('access_token', SLACK_ACCESS_TOKEN),
]),
('Discordapp', [
('use_%s', int(USE_DISCORDAPP)),
('as_authed', int(DISCORDAPP_AS_AUTHED)),
('username', DISCORDAPP_USERNAME),
('icon_url', DISCORDAPP_ICON_URL),
('as_tts', int(DISCORDAPP_AS_TTS)),
('access_token', DISCORDAPP_ACCESS_TOKEN),
]),
('Gitter', [
('use_%s', int(USE_GITTER)),
('room', GITTER_ROOM),
('access_token', GITTER_ACCESS_TOKEN),
]),
('Twitter', [
('use_%s', int(USE_TWITTER)),
('username', TWITTER_USERNAME), ('password', helpers.encrypt(TWITTER_PASSWORD, ENCRYPTION_VERSION)),
('prefix', TWITTER_PREFIX),
]),
('Email', [
('use_%s', int(USE_EMAIL)),
('old_subjects', int(EMAIL_OLD_SUBJECTS)),
('host', EMAIL_HOST), ('port', int(EMAIL_PORT) if 25 != int(EMAIL_PORT) else None),
('tls', int(EMAIL_TLS)),
('user', EMAIL_USER), ('password', helpers.encrypt(EMAIL_PASSWORD, ENCRYPTION_VERSION)),
('from', EMAIL_FROM),
('list', EMAIL_LIST),
]),
# (, [(, )]),
]).items():
cfg_lc = cfg.lower()
cfg_keys += [cfg]
new_config[cfg] = {}
for (k, v) in filter(lambda (_, y): any([y]), items):
k = '%s' in k and (k % cfg_lc) or (cfg_lc + '_' + k)
# correct for cases where keys are named in an inconsistent manner to parent stanza
k = k.replace('blackhole_', '').replace('sabnzbd_', 'sab_')
new_config[cfg].update({k: v})
new_config['TORRENT'] = {}
new_config['TORRENT']['torrent_username'] = TORRENT_USERNAME
new_config['TORRENT']['torrent_password'] = helpers.encrypt(TORRENT_PASSWORD, ENCRYPTION_VERSION)
new_config['TORRENT']['torrent_host'] = TORRENT_HOST
new_config['TORRENT']['torrent_path'] = TORRENT_PATH
new_config['TORRENT']['torrent_seed_time'] = int(TORRENT_SEED_TIME)
new_config['TORRENT']['torrent_paused'] = int(TORRENT_PAUSED)
new_config['TORRENT']['torrent_high_bandwidth'] = int(TORRENT_HIGH_BANDWIDTH)
new_config['TORRENT']['torrent_label'] = TORRENT_LABEL
new_config['TORRENT']['torrent_verify_cert'] = int(TORRENT_VERIFY_CERT)
for (notifier, onsnatch, ondownload, onsubtitledownload) in [
('Kodi', KODI_NOTIFY_ONSNATCH, KODI_NOTIFY_ONDOWNLOAD, KODI_NOTIFY_ONSUBTITLEDOWNLOAD),
('Plex', PLEX_NOTIFY_ONSNATCH, PLEX_NOTIFY_ONDOWNLOAD, PLEX_NOTIFY_ONSUBTITLEDOWNLOAD),
('XBMC', XBMC_NOTIFY_ONSNATCH, XBMC_NOTIFY_ONDOWNLOAD, XBMC_NOTIFY_ONSUBTITLEDOWNLOAD),
('SynologyNotifier', SYNOLOGYNOTIFIER_NOTIFY_ONSNATCH, SYNOLOGYNOTIFIER_NOTIFY_ONDOWNLOAD,
SYNOLOGYNOTIFIER_NOTIFY_ONSUBTITLEDOWNLOAD),
new_config['Emby'] = {}
new_config['Emby']['use_emby'] = int(USE_EMBY)
new_config['Emby']['emby_update_library'] = int(EMBY_UPDATE_LIBRARY)
new_config['Emby']['emby_parent_maps'] = EMBY_PARENT_MAPS
new_config['Emby']['emby_host'] = EMBY_HOST
new_config['Emby']['emby_apikey'] = EMBY_APIKEY
new_config['Emby']['emby_watchedstate_scheduled'] = int(EMBY_WATCHEDSTATE_SCHEDULED)
new_config['Emby']['emby_watchedstate_frequency'] = int(EMBY_WATCHEDSTATE_FREQUENCY)
('Boxcar2', BOXCAR2_NOTIFY_ONSNATCH, BOXCAR2_NOTIFY_ONDOWNLOAD, BOXCAR2_NOTIFY_ONSUBTITLEDOWNLOAD),
('Pushbullet', PUSHBULLET_NOTIFY_ONSNATCH, PUSHBULLET_NOTIFY_ONDOWNLOAD, PUSHBULLET_NOTIFY_ONSUBTITLEDOWNLOAD),
('Pushover', PUSHOVER_NOTIFY_ONSNATCH, PUSHOVER_NOTIFY_ONDOWNLOAD, PUSHOVER_NOTIFY_ONSUBTITLEDOWNLOAD),
('Growl', GROWL_NOTIFY_ONSNATCH, GROWL_NOTIFY_ONDOWNLOAD, GROWL_NOTIFY_ONSUBTITLEDOWNLOAD),
('Prowl', PROWL_NOTIFY_ONSNATCH, PROWL_NOTIFY_ONDOWNLOAD, PROWL_NOTIFY_ONSUBTITLEDOWNLOAD),
('Libnotify', LIBNOTIFY_NOTIFY_ONSNATCH, LIBNOTIFY_NOTIFY_ONDOWNLOAD, LIBNOTIFY_NOTIFY_ONSUBTITLEDOWNLOAD),
# ('Pushalot', PUSHALOT_NOTIFY_ONSNATCH, PUSHALOT_NOTIFY_ONDOWNLOAD, PUSHALOT_NOTIFY_ONSUBTITLEDOWNLOAD),
new_config['Kodi'] = {}
new_config['Kodi']['use_kodi'] = int(USE_KODI)
new_config['Kodi']['kodi_always_on'] = int(KODI_ALWAYS_ON)
new_config['Kodi']['kodi_update_library'] = int(KODI_UPDATE_LIBRARY)
new_config['Kodi']['kodi_update_full'] = int(KODI_UPDATE_FULL)
new_config['Kodi']['kodi_update_onlyfirst'] = int(KODI_UPDATE_ONLYFIRST)
new_config['Kodi']['kodi_parent_maps'] = KODI_PARENT_MAPS
new_config['Kodi']['kodi_host'] = KODI_HOST
new_config['Kodi']['kodi_username'] = KODI_USERNAME
new_config['Kodi']['kodi_password'] = helpers.encrypt(KODI_PASSWORD, ENCRYPTION_VERSION)
new_config['Kodi']['kodi_notify_onsnatch'] = int(KODI_NOTIFY_ONSNATCH)
new_config['Kodi']['kodi_notify_ondownload'] = int(KODI_NOTIFY_ONDOWNLOAD)
new_config['Kodi']['kodi_notify_onsubtitledownload'] = int(KODI_NOTIFY_ONSUBTITLEDOWNLOAD)
('Slack', SLACK_NOTIFY_ONSNATCH, SLACK_NOTIFY_ONDOWNLOAD, SLACK_NOTIFY_ONSUBTITLEDOWNLOAD),
('Discordapp', DISCORDAPP_NOTIFY_ONSNATCH, DISCORDAPP_NOTIFY_ONDOWNLOAD, DISCORDAPP_NOTIFY_ONSUBTITLEDOWNLOAD),
('Gitter', GITTER_NOTIFY_ONSNATCH, GITTER_NOTIFY_ONDOWNLOAD, GITTER_NOTIFY_ONSUBTITLEDOWNLOAD),
('Twitter', TWITTER_NOTIFY_ONSNATCH, TWITTER_NOTIFY_ONDOWNLOAD, TWITTER_NOTIFY_ONSUBTITLEDOWNLOAD),
('Email', EMAIL_NOTIFY_ONSNATCH, EMAIL_NOTIFY_ONDOWNLOAD, EMAIL_NOTIFY_ONSUBTITLEDOWNLOAD),
]:
if any([onsnatch, ondownload, onsubtitledownload]):
if onsnatch:
new_config[notifier]['%s_notify_onsnatch' % notifier.lower()] = int(onsnatch)
if ondownload:
new_config[notifier]['%s_notify_ondownload' % notifier.lower()] = int(ondownload)
if onsubtitledownload:
new_config[notifier]['%s_notify_onsubtitledownload' % notifier.lower()] = int(onsubtitledownload)
new_config['Plex'] = {}
new_config['Plex']['use_plex'] = int(USE_PLEX)
new_config['Plex']['plex_username'] = PLEX_USERNAME
new_config['Plex']['plex_password'] = helpers.encrypt(PLEX_PASSWORD, ENCRYPTION_VERSION)
new_config['Plex']['plex_update_library'] = int(PLEX_UPDATE_LIBRARY)
new_config['Plex']['plex_parent_maps'] = PLEX_PARENT_MAPS
new_config['Plex']['plex_server_host'] = PLEX_SERVER_HOST
new_config['Plex']['plex_notify_onsnatch'] = int(PLEX_NOTIFY_ONSNATCH)
new_config['Plex']['plex_notify_ondownload'] = int(PLEX_NOTIFY_ONDOWNLOAD)
new_config['Plex']['plex_notify_onsubtitledownload'] = int(PLEX_NOTIFY_ONSUBTITLEDOWNLOAD)
new_config['Plex']['plex_host'] = PLEX_HOST
new_config['Plex']['plex_watchedstate_scheduled'] = int(PLEX_WATCHEDSTATE_SCHEDULED)
new_config['Plex']['plex_watchedstate_frequency'] = int(PLEX_WATCHEDSTATE_FREQUENCY)
new_config['XBMC'] = {}
new_config['XBMC']['use_xbmc'] = int(USE_XBMC)
new_config['XBMC']['xbmc_always_on'] = int(XBMC_ALWAYS_ON)
new_config['XBMC']['xbmc_update_library'] = int(XBMC_UPDATE_LIBRARY)
new_config['XBMC']['xbmc_update_full'] = int(XBMC_UPDATE_FULL)
new_config['XBMC']['xbmc_update_onlyfirst'] = int(XBMC_UPDATE_ONLYFIRST)
new_config['XBMC']['xbmc_notify_onsnatch'] = int(XBMC_NOTIFY_ONSNATCH)
new_config['XBMC']['xbmc_notify_ondownload'] = int(XBMC_NOTIFY_ONDOWNLOAD)
new_config['XBMC']['xbmc_notify_onsubtitledownload'] = int(XBMC_NOTIFY_ONSUBTITLEDOWNLOAD)
new_config['XBMC']['xbmc_host'] = XBMC_HOST
new_config['XBMC']['xbmc_username'] = XBMC_USERNAME
new_config['XBMC']['xbmc_password'] = helpers.encrypt(XBMC_PASSWORD, ENCRYPTION_VERSION)
new_config['NMJ'] = {}
new_config['NMJ']['use_nmj'] = int(USE_NMJ)
new_config['NMJ']['nmj_host'] = NMJ_HOST
new_config['NMJ']['nmj_database'] = NMJ_DATABASE
new_config['NMJ']['nmj_mount'] = NMJ_MOUNT
new_config['NMJv2'] = {}
new_config['NMJv2']['use_nmjv2'] = int(USE_NMJv2)
new_config['NMJv2']['nmjv2_host'] = NMJv2_HOST
new_config['NMJv2']['nmjv2_database'] = NMJv2_DATABASE
new_config['NMJv2']['nmjv2_dbloc'] = NMJv2_DBLOC
new_config['Synology'] = {}
new_config['Synology']['use_synoindex'] = int(USE_SYNOINDEX)
new_config['SynologyNotifier'] = {}
new_config['SynologyNotifier']['use_synologynotifier'] = int(USE_SYNOLOGYNOTIFIER)
new_config['SynologyNotifier']['synologynotifier_notify_onsnatch'] = int(SYNOLOGYNOTIFIER_NOTIFY_ONSNATCH)
new_config['SynologyNotifier']['synologynotifier_notify_ondownload'] = int(SYNOLOGYNOTIFIER_NOTIFY_ONDOWNLOAD)
new_config['SynologyNotifier']['synologynotifier_notify_onsubtitledownload'] = int(
SYNOLOGYNOTIFIER_NOTIFY_ONSUBTITLEDOWNLOAD)
new_config['pyTivo'] = {}
new_config['pyTivo']['use_pytivo'] = int(USE_PYTIVO)
new_config['pyTivo']['pytivo_host'] = PYTIVO_HOST
new_config['pyTivo']['pytivo_share_name'] = PYTIVO_SHARE_NAME
new_config['pyTivo']['pytivo_tivo_name'] = PYTIVO_TIVO_NAME
new_config['Boxcar2'] = {}
new_config['Boxcar2']['use_boxcar2'] = int(USE_BOXCAR2)
new_config['Boxcar2']['boxcar2_notify_onsnatch'] = int(BOXCAR2_NOTIFY_ONSNATCH)
new_config['Boxcar2']['boxcar2_notify_ondownload'] = int(BOXCAR2_NOTIFY_ONDOWNLOAD)
new_config['Boxcar2']['boxcar2_notify_onsubtitledownload'] = int(BOXCAR2_NOTIFY_ONSUBTITLEDOWNLOAD)
new_config['Boxcar2']['boxcar2_accesstoken'] = BOXCAR2_ACCESSTOKEN
new_config['Boxcar2']['boxcar2_sound'] = BOXCAR2_SOUND
new_config['Pushbullet'] = {}
new_config['Pushbullet']['use_pushbullet'] = int(USE_PUSHBULLET)
new_config['Pushbullet']['pushbullet_notify_onsnatch'] = int(PUSHBULLET_NOTIFY_ONSNATCH)
new_config['Pushbullet']['pushbullet_notify_ondownload'] = int(PUSHBULLET_NOTIFY_ONDOWNLOAD)
new_config['Pushbullet']['pushbullet_notify_onsubtitledownload'] = int(PUSHBULLET_NOTIFY_ONSUBTITLEDOWNLOAD)
new_config['Pushbullet']['pushbullet_access_token'] = PUSHBULLET_ACCESS_TOKEN
new_config['Pushbullet']['pushbullet_device_iden'] = PUSHBULLET_DEVICE_IDEN
new_config['Pushover'] = {}
new_config['Pushover']['use_pushover'] = int(USE_PUSHOVER)
new_config['Pushover']['pushover_notify_onsnatch'] = int(PUSHOVER_NOTIFY_ONSNATCH)
new_config['Pushover']['pushover_notify_ondownload'] = int(PUSHOVER_NOTIFY_ONDOWNLOAD)
new_config['Pushover']['pushover_notify_onsubtitledownload'] = int(PUSHOVER_NOTIFY_ONSUBTITLEDOWNLOAD)
new_config['Pushover']['pushover_userkey'] = PUSHOVER_USERKEY
new_config['Pushover']['pushover_apikey'] = PUSHOVER_APIKEY
new_config['Pushover']['pushover_priority'] = PUSHOVER_PRIORITY
new_config['Pushover']['pushover_device'] = PUSHOVER_DEVICE
new_config['Pushover']['pushover_sound'] = PUSHOVER_SOUND
new_config['Growl'] = {}
new_config['Growl']['use_growl'] = int(USE_GROWL)
new_config['Growl']['growl_notify_onsnatch'] = int(GROWL_NOTIFY_ONSNATCH)
new_config['Growl']['growl_notify_ondownload'] = int(GROWL_NOTIFY_ONDOWNLOAD)
new_config['Growl']['growl_notify_onsubtitledownload'] = int(GROWL_NOTIFY_ONSUBTITLEDOWNLOAD)
new_config['Growl']['growl_host'] = GROWL_HOST
new_config['Growl']['growl_password'] = helpers.encrypt(GROWL_PASSWORD, ENCRYPTION_VERSION)
new_config['Prowl'] = {}
new_config['Prowl']['use_prowl'] = int(USE_PROWL)
new_config['Prowl']['prowl_notify_onsnatch'] = int(PROWL_NOTIFY_ONSNATCH)
new_config['Prowl']['prowl_notify_ondownload'] = int(PROWL_NOTIFY_ONDOWNLOAD)
new_config['Prowl']['prowl_notify_onsubtitledownload'] = int(PROWL_NOTIFY_ONSUBTITLEDOWNLOAD)
new_config['Prowl']['prowl_api'] = PROWL_API
new_config['Prowl']['prowl_priority'] = PROWL_PRIORITY
new_config['Libnotify'] = {}
new_config['Libnotify']['use_libnotify'] = int(USE_LIBNOTIFY)
new_config['Libnotify']['libnotify_notify_onsnatch'] = int(LIBNOTIFY_NOTIFY_ONSNATCH)
new_config['Libnotify']['libnotify_notify_ondownload'] = int(LIBNOTIFY_NOTIFY_ONDOWNLOAD)
new_config['Libnotify']['libnotify_notify_onsubtitledownload'] = int(LIBNOTIFY_NOTIFY_ONSUBTITLEDOWNLOAD)
new_config['Pushalot'] = {}
new_config['Pushalot']['use_pushalot'] = int(USE_PUSHALOT)
new_config['Pushalot']['pushalot_notify_onsnatch'] = int(PUSHALOT_NOTIFY_ONSNATCH)
new_config['Pushalot']['pushalot_notify_ondownload'] = int(PUSHALOT_NOTIFY_ONDOWNLOAD)
new_config['Pushalot']['pushalot_notify_onsubtitledownload'] = int(PUSHALOT_NOTIFY_ONSUBTITLEDOWNLOAD)
new_config['Pushalot']['pushalot_authorizationtoken'] = PUSHALOT_AUTHORIZATIONTOKEN
new_config['Trakt'] = {}
new_config['Trakt']['use_trakt'] = int(USE_TRAKT)
new_config['Trakt']['trakt_remove_watchlist'] = int(TRAKT_REMOVE_WATCHLIST)
new_config['Trakt']['trakt_remove_serieslist'] = int(TRAKT_REMOVE_SERIESLIST)
new_config['Trakt']['trakt_use_watchlist'] = int(TRAKT_USE_WATCHLIST)
new_config['Trakt']['trakt_method_add'] = int(TRAKT_METHOD_ADD)
new_config['Trakt']['trakt_start_paused'] = int(TRAKT_START_PAUSED)
new_config['Trakt']['trakt_sync'] = int(TRAKT_SYNC)
new_config['Trakt']['trakt_default_indexer'] = int(TRAKT_DEFAULT_INDEXER)
new_config['Trakt']['trakt_update_collection'] = trakt_helpers.build_config_string(TRAKT_UPDATE_COLLECTION)
new_config['Trakt']['trakt_accounts'] = TraktAPI.build_config_string(TRAKT_ACCOUNTS)
new_config['Trakt']['trakt_mru'] = TRAKT_MRU
new_config['Slack'] = {}
new_config['Slack']['use_slack'] = int(USE_SLACK)
new_config['Slack']['slack_notify_onsnatch'] = int(SLACK_NOTIFY_ONSNATCH)
new_config['Slack']['slack_notify_ondownload'] = int(SLACK_NOTIFY_ONDOWNLOAD)
new_config['Slack']['slack_notify_onsubtitledownload'] = int(SLACK_NOTIFY_ONSUBTITLEDOWNLOAD)
new_config['Slack']['slack_channel'] = SLACK_CHANNEL
new_config['Slack']['slack_as_authed'] = int(SLACK_AS_AUTHED)
new_config['Slack']['slack_bot_name'] = SLACK_BOT_NAME
new_config['Slack']['slack_icon_url'] = SLACK_ICON_URL
new_config['Slack']['slack_access_token'] = SLACK_ACCESS_TOKEN
new_config['Discordapp'] = {}
new_config['Discordapp']['use_discordapp'] = int(USE_DISCORDAPP)
new_config['Discordapp']['discordapp_notify_onsnatch'] = int(DISCORDAPP_NOTIFY_ONSNATCH)
new_config['Discordapp']['discordapp_notify_ondownload'] = int(DISCORDAPP_NOTIFY_ONDOWNLOAD)
new_config['Discordapp']['discordapp_notify_onsubtitledownload'] = int(DISCORDAPP_NOTIFY_ONSUBTITLEDOWNLOAD)
new_config['Discordapp']['discordapp_as_authed'] = int(DISCORDAPP_AS_AUTHED)
new_config['Discordapp']['discordapp_username'] = DISCORDAPP_USERNAME
new_config['Discordapp']['discordapp_icon_url'] = DISCORDAPP_ICON_URL
new_config['Discordapp']['discordapp_as_tts'] = int(DISCORDAPP_AS_TTS)
new_config['Discordapp']['discordapp_access_token'] = DISCORDAPP_ACCESS_TOKEN
new_config['Gitter'] = {}
new_config['Gitter']['use_gitter'] = int(USE_GITTER)
new_config['Gitter']['gitter_notify_onsnatch'] = int(GITTER_NOTIFY_ONSNATCH)
new_config['Gitter']['gitter_notify_ondownload'] = int(GITTER_NOTIFY_ONDOWNLOAD)
new_config['Gitter']['gitter_notify_onsubtitledownload'] = int(GITTER_NOTIFY_ONSUBTITLEDOWNLOAD)
new_config['Gitter']['gitter_room'] = GITTER_ROOM
new_config['Gitter']['gitter_access_token'] = GITTER_ACCESS_TOKEN
new_config['Twitter'] = {}
new_config['Twitter']['use_twitter'] = int(USE_TWITTER)
new_config['Twitter']['twitter_notify_onsnatch'] = int(TWITTER_NOTIFY_ONSNATCH)
new_config['Twitter']['twitter_notify_ondownload'] = int(TWITTER_NOTIFY_ONDOWNLOAD)
new_config['Twitter']['twitter_notify_onsubtitledownload'] = int(TWITTER_NOTIFY_ONSUBTITLEDOWNLOAD)
new_config['Twitter']['twitter_username'] = TWITTER_USERNAME
new_config['Twitter']['twitter_password'] = helpers.encrypt(TWITTER_PASSWORD, ENCRYPTION_VERSION)
new_config['Twitter']['twitter_prefix'] = TWITTER_PREFIX
new_config['Email'] = {}
new_config['Email']['use_email'] = int(USE_EMAIL)
new_config['Email']['email_old_subjects'] = int(EMAIL_OLD_SUBJECTS)
new_config['Email']['email_notify_onsnatch'] = int(EMAIL_NOTIFY_ONSNATCH)
new_config['Email']['email_notify_ondownload'] = int(EMAIL_NOTIFY_ONDOWNLOAD)
new_config['Email']['email_notify_onsubtitledownload'] = int(EMAIL_NOTIFY_ONSUBTITLEDOWNLOAD)
new_config['Email']['email_host'] = EMAIL_HOST
new_config['Email']['email_port'] = int(EMAIL_PORT)
new_config['Email']['email_tls'] = int(EMAIL_TLS)
new_config['Email']['email_user'] = EMAIL_USER
new_config['Email']['email_password'] = helpers.encrypt(EMAIL_PASSWORD, ENCRYPTION_VERSION)
new_config['Email']['email_from'] = EMAIL_FROM
new_config['Email']['email_list'] = EMAIL_LIST
# remove empty stanzas
for k in filter(lambda c: not new_config[c], cfg_keys):
del new_config[k]
new_config['Newznab'] = {}
new_config['Newznab']['newznab_data'] = NEWZNAB_DATA
new_config['TorrentRss'] = {}
new_config['TorrentRss']['torrentrss_data'] = '!!!'.join([x.config_str() for x in torrentRssProviderList])
torrent_rss = '!!!'.join([x.config_str() for x in torrentRssProviderList])
if torrent_rss:
new_config['TorrentRss'] = {}
new_config['TorrentRss']['torrentrss_data'] = torrent_rss
new_config['GUI'] = {}
new_config['GUI']['gui_name'] = GUI_NAME

View file

@ -27,7 +27,7 @@ from sickbeard import encodingKludge as ek
from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException
MIN_DB_VERSION = 9 # oldest db version we support migrating from
MAX_DB_VERSION = 20009
MAX_DB_VERSION = 20010
TEST_BASE_VERSION = None # the base production db version, only needed for TEST db versions (>=100000)
@ -198,7 +198,7 @@ class InitialSchema(db.SchemaUpgrade):
# = Main DB Migrations =
# ======================
# Add new migrations at the bottom of the list; subclass the previous migration.
# 0 -> 20007
# 0 -> 20009
def execute(self):
db.backup_database('sickbeard.db', self.checkDBVersion())
@ -250,14 +250,15 @@ class InitialSchema(db.SchemaUpgrade):
# tv_episodes_watched
'CREATE TABLE tv_episodes_watched (tvep_id NUMERIC NOT NULL, clientep_id TEXT, label TEXT,'
' played NUMERIC DEFAULT 0 NOT NULL, date_watched NUMERIC NOT NULL, date_added NUMERIC,'
' status NUMERIC, location TEXT, file_size NUMERIC, hide INT default 0 not null)',
' status NUMERIC, location TEXT, file_size NUMERIC, hide INT DEFAULT 0 not null)',
# tv_shows
'CREATE TABLE tv_shows (show_id INTEGER PRIMARY KEY, indexer_id NUMERIC, indexer NUMERIC,'
' show_name TEXT, location TEXT, network TEXT, genre TEXT, classification TEXT, runtime NUMERIC,'
' quality NUMERIC, airs TEXT, status TEXT, flatten_folders NUMERIC, paused NUMERIC, startyear NUMERIC,'
' air_by_date NUMERIC, lang TEXT, subtitles NUMERIC, notify_list TEXT, imdb_id TEXT,'
' last_update_indexer NUMERIC, dvdorder NUMERIC, archive_firstmatch NUMERIC, rls_require_words TEXT,'
' rls_ignore_words TEXT, sports NUMERIC, anime NUMERIC, scene NUMERIC, overview TEXT, tag TEXT)',
' rls_ignore_words TEXT, sports NUMERIC, anime NUMERIC, scene NUMERIC, overview TEXT, tag TEXT,'
' prune INT DEFAULT 0)',
'CREATE UNIQUE INDEX idx_indexer_id ON tv_shows (indexer_id)',
# tv_shows_not_found
'CREATE TABLE tv_shows_not_found (indexer NUMERIC NOT NULL, indexer_id NUMERIC NOT NULL,'
@ -1406,3 +1407,17 @@ class AddWatched(db.SchemaUpgrade):
self.setDBVersion(20009)
return self.checkDBVersion()
# 20009 -> 20010
class AddPrune(db.SchemaUpgrade):
    """Main DB migration: add the `prune` column to tv_shows.

    `prune` holds the per-show "keep up to x most recent downloads" setting
    introduced in 0.17.0 (0 = keep everything).  Bumps the db version to 20010.
    """
    def execute(self):
        # Guard makes the migration idempotent: only alter the table when the
        # column is missing, and only back up the db before actually changing it.
        if not self.hasColumn('tv_shows', 'prune'):
            logger.log('Adding prune to tv_shows')
            db.backup_database('sickbeard.db', self.checkDBVersion())
            self.addColumn('tv_shows', 'prune', 'INT', 0)
        # Version is always advanced so MigrationCode does not re-run this step
        self.setDBVersion(20010)
        return self.checkDBVersion()

View file

@ -554,6 +554,7 @@ def MigrationCode(myDB):
20006: sickbeard.mainDB.DBIncreaseTo20007,
20007: sickbeard.mainDB.AddWebdlTypesTable,
20008: sickbeard.mainDB.AddWatched,
20009: sickbeard.mainDB.AddPrune,
# 20002: sickbeard.mainDB.AddCoolSickGearFeature3,
}

View file

@ -42,6 +42,7 @@ import adba
import requests
import requests.exceptions
from cfscrape import CloudflareScraper
from lib.send2trash import send2trash
import sickbeard
import subliminal
@ -177,6 +178,39 @@ def sanitizeFileName(name):
return name
def remove_file(filepath, tree=False, prefix_failure='', log_level=logger.MESSAGE):
    """
    Remove file based on setting for trash v permanent delete.

    :param filepath: Path and file name
    :type filepath: String
    :param tree: Remove file tree
    :type tree: Bool
    :param prefix_failure: Text to prepend to error log, e.g. show id
    :type prefix_failure: String
    :param log_level: Log level to use for error
    :type log_level: Int
    :return: Type of removal ('Deleted' or 'Trashed') if filepath no longer exists,
             or None if no removal occurred (including falsy filepath)
    :rtype: String or None
    """
    result = None
    if filepath:
        try:
            # Default to permanent delete; switch to trash when the user opted in
            result = 'Deleted'
            if sickbeard.TRASH_REMOVE_SHOW:
                result = 'Trashed'
                ek.ek(send2trash, filepath)
            elif tree:
                ek.ek(shutil.rmtree, filepath)
            else:
                ek.ek(os.remove, filepath)
        except OSError as e:
            logger.log(u'%sUnable to %s %s %s: %s' % (prefix_failure, ('delete', 'trash')[sickbeard.TRASH_REMOVE_SHOW],
                                                      ('file', 'dir')[tree], filepath, str(e.strerror)), log_level)

    # Fix: the original `(None, result)[filepath and not ...exists...]` raised
    # TypeError when filepath was '' or None (tuple indexed with a non-int).
    # Report success only when the path verifiably no longer exists.
    if filepath and not ek.ek(os.path.exists, filepath):
        return result
    return None
def remove_file_failed(filename):
try:
ek.ek(os.remove, filename)
@ -1132,6 +1166,8 @@ def getURL(url, post_data=None, params=None, headers=None, timeout=30, session=N
cache_dir = sickbeard.CACHE_DIR or _getTempDir()
session = CacheControl(sess=session, cache=caches.FileCache(ek.ek(os.path.join, cache_dir, 'sessions')))
provider = kwargs.pop('provider', None)
# session master headers
req_headers = {'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
'Accept-Encoding': 'gzip,deflate'}
@ -1186,6 +1222,10 @@ def getURL(url, post_data=None, params=None, headers=None, timeout=30, session=N
url = urlparse.urlunparse(parsed)
response = session.get(url, timeout=timeout, **kwargs)
# noinspection PyProtectedMember
if provider and provider._has_signature(response.content):
return response.content
if raise_status_code:
response.raise_for_status()
@ -1694,7 +1734,7 @@ def path_mapper(search, replace, subject):
replace = re.sub(r'[\\]', delim, replace)
path = re.sub(r'[\\]', delim, subject)
result = re.sub('(?i)^%s' % search, replace, path)
result = os.path.normpath(re.sub(delim, '/', result))
result = ek.ek(os.path.normpath, re.sub(delim, '/', result))
return result, result != subject

View file

@ -33,7 +33,6 @@ from sickbeard.metadata.generic import GenericMetadata
from lib.hachoir.parser import createParser
from lib.hachoir.metadata import extractMetadata
from lib.send2trash import send2trash
try:
import zlib
except:
@ -417,23 +416,15 @@ class ImageCache:
void = False
if not void and need_images[self.FANART]:
action = ('delete', 'trash')[sickbeard.TRASH_REMOVE_SHOW]
cache_path = self.fanart_path(show_id).replace('%s.fanart.jpg' % show_id, '')
# num_images = len(fnmatch.filter(os.listdir(cache_path), '*.jpg'))
for cache_dir in ek.ek(glob.glob, cache_path):
if show_id in sickbeard.FANART_RATINGS:
del (sickbeard.FANART_RATINGS[show_id])
logger.log(u'Attempt to %s purge cache file %s' % (action, cache_dir), logger.DEBUG)
try:
if sickbeard.TRASH_REMOVE_SHOW:
send2trash(cache_dir)
else:
shutil.rmtree(cache_dir)
except OSError as e:
logger.log(u'Unable to %s %s: %s / %s' % (action, cache_dir, repr(e), str(e)), logger.WARNING)
result = helpers.remove_file(cache_dir, tree=True)
if result:
logger.log(u'%s cache file %s' % (result, cache_dir), logger.DEBUG)
try:
checked_files = []

View file

@ -30,7 +30,7 @@ def test_nzbget(host, use_https, username, password):
result = False
if not host:
msg = 'No NZBget host found. Please configure it'
msg = 'No NZBGet host found. Please configure it'
logger.log(msg, logger.ERROR)
return result, msg, None
@ -49,12 +49,12 @@ def test_nzbget(host, use_https, username, password):
logger.log(u'NZBGet URL: %s' % url, logger.DEBUG)
except moves.http_client.socket.error:
msg = 'Please check NZBget host and port (if it is running). NZBget is not responding to these values'
msg = 'Please check NZBGet host and port (if it is running). NZBGet is not responding to these values'
logger.log(msg, logger.ERROR)
except moves.xmlrpc_client.ProtocolError as e:
if 'Unauthorized' == e.errmsg:
msg = 'NZBget username or password is incorrect'
msg = 'NZBGet username or password is incorrect'
logger.log(msg, logger.ERROR)
else:
msg = 'Protocol Error: %s' % e.errmsg
@ -150,11 +150,11 @@ def send_nzb(nzb):
nzbget_prio, False, nzb.url)
if nzbget_result:
logger.log(u'NZB sent to NZBget successfully', logger.DEBUG)
logger.log(u'NZB sent to NZBGet successfully', logger.DEBUG)
result = True
else:
logger.log(u'NZBget could not add %s to the queue' % ('%s.nzb' % nzb.name), logger.ERROR)
logger.log(u'NZBGet could not add %s to the queue' % ('%s.nzb' % nzb.name), logger.ERROR)
except(StandardError, Exception):
logger.log(u'Connect Error to NZBget: could not add %s to the queue' % ('%s.nzb' % nzb.name), logger.ERROR)
logger.log(u'Connect Error to NZBGet: could not add %s to the queue' % ('%s.nzb' % nzb.name), logger.ERROR)
return result

View file

@ -43,11 +43,6 @@ from sickbeard.name_parser.parser import NameParser, InvalidNameException, Inval
from lib import adba
try:
from lib.send2trash import send2trash
except ImportError:
pass
class PostProcessor(object):
"""
@ -235,13 +230,7 @@ class PostProcessor(object):
except:
self._log(u'Cannot change permissions to writeable to delete file: %s' % cur_file, logger.WARNING)
try:
if self.use_trash:
ek.ek(send2trash, cur_file)
else:
ek.ek(os.remove, cur_file)
except OSError as e:
self._log(u'Unable to delete file %s: %s' % (cur_file, str(e.strerror)), logger.DEBUG)
helpers.remove_file(cur_file, log_level=logger.DEBUG)
if True is not ek.ek(os.path.isfile, cur_file):
self._log(u'Deleted file ' + cur_file, logger.DEBUG)

View file

@ -48,11 +48,6 @@ try:
except ImportError:
from lib import simplejson as json
try:
from lib.send2trash import send2trash
except ImportError:
pass
# noinspection PyArgumentList
class ProcessTVShow(object):
@ -150,13 +145,8 @@ class ProcessTVShow(object):
ek.ek(os.chmod, cur_file_path, stat.S_IWRITE)
except OSError as e:
self._log_helper(u'Cannot change permissions of %s: %s' % (cur_file_path, str(e.strerror)))
try:
if use_trash:
ek.ek(send2trash, cur_file_path)
else:
ek.ek(os.remove, cur_file_path)
except OSError as e:
self._log_helper(u'Unable to delete file %s: %s' % (cur_file, str(e.strerror)))
helpers.remove_file(cur_file_path)
if ek.ek(os.path.isfile, cur_file_path):
result = False

View file

@ -41,7 +41,7 @@ def search_propers(proper_list=None):
if not sickbeard.DOWNLOAD_PROPERS:
return
logger.log(('Checking propers from recent search', 'Beginning search for new propers')[None is proper_list])
logger.log(('Checking Propers from recent search', 'Beginning search for new Propers')[None is proper_list])
age_shows, age_anime = sickbeard.BACKLOG_DAYS + 2, 14
aired_since_shows = datetime.datetime.today() - datetime.timedelta(days=age_shows)
@ -53,7 +53,7 @@ def search_propers(proper_list=None):
if propers:
_download_propers(propers)
else:
logger.log(u'No downloads or snatches found for the last %s%s days to use for a propers search' %
logger.log('No downloads or snatches found for the last %s%s days to use for a Propers search' %
(age_shows, ('', ' (%s for anime)' % age_anime)[helpers.has_anime()]))
run_at = ''
@ -63,18 +63,17 @@ def search_propers(proper_list=None):
proper_sch = sickbeard.properFinderScheduler
if None is proper_sch.start_time:
run_in = proper_sch.lastRun + proper_sch.cycleTime - datetime.datetime.now()
run_at = u', next check '
run_at = ', next check '
if datetime.timedelta() > run_in:
run_at += u'imminent'
run_at += 'imminent'
else:
hours, remainder = divmod(run_in.seconds, 3600)
minutes, seconds = divmod(remainder, 60)
run_at += u'in approx. ' + ('%dh, %dm' % (hours, minutes) if 0 < hours else
'%dm, %ds' % (minutes, seconds))
run_at += 'in approx. ' + ('%dm, %ds' % (minutes, seconds), '%dh, %dm' % (hours, minutes))[0 < hours]
logger.log(u'Completed search for new propers%s' % run_at)
logger.log('Completed search for new Propers%s' % run_at)
else:
logger.log(u'Completed checking propers from recent search')
logger.log('Completed checking Propers from recent search')
def get_old_proper_level(show_obj, indexer, indexerid, season, episodes, old_status, new_quality,
@ -89,10 +88,12 @@ def get_old_proper_level(show_obj, indexer, indexerid, season, episodes, old_sta
my_db = db.DBConnection()
np = NameParser(False, showObj=show_obj)
for episode in episodes:
result = my_db.select('SELECT resource FROM history WHERE showid = ? AND season = ? AND episode = ? AND '
'(' + ' OR '.join("action LIKE '%%%02d'" % x for x in SNATCHED_ANY) + ') '
'ORDER BY date DESC LIMIT 1',
[indexerid, season, episode])
result = my_db.select(
'SELECT resource FROM history'
' WHERE showid = ?'
' AND season = ? AND episode = ? AND '
'(%s) ORDER BY date DESC LIMIT 1' % (' OR '.join('action LIKE "%%%02d"' % x for x in SNATCHED_ANY)),
[indexerid, season, episode])
if not result or not isinstance(result[0]['resource'], basestring) or not result[0]['resource']:
continue
nq = Quality.sceneQuality(result[0]['resource'], show_obj.is_anime)
@ -180,9 +181,10 @@ def load_webdl_types():
def _get_proper_list(aired_since_shows, recent_shows, recent_anime, proper_list=None):
propers = {}
# for each provider get a list of the
my_db = db.DBConnection()
# for each provider get a list of arbitrary Propers
orig_thread_name = threading.currentThread().name
providers = [x for x in sickbeard.providers.sortedProviderList() if x.is_active()]
providers = filter(lambda p: p.is_active(), sickbeard.providers.sortedProviderList())
for cur_provider in providers:
if not recent_anime and cur_provider.anime_only:
continue
@ -192,253 +194,277 @@ def _get_proper_list(aired_since_shows, recent_shows, recent_anime, proper_list=
if not found_propers:
continue
else:
threading.currentThread().name = orig_thread_name + ' :: [' + cur_provider.name + ']'
threading.currentThread().name = '%s :: [%s]' % (orig_thread_name, cur_provider.name)
logger.log(u'Searching for new PROPER releases')
logger.log('Searching for new PROPER releases')
try:
found_propers = cur_provider.find_propers(search_date=aired_since_shows, shows=recent_shows,
anime=recent_anime)
except exceptions.AuthException as e:
logger.log(u'Authentication error: ' + ex(e), logger.ERROR)
logger.log('Authentication error: %s' % ex(e), logger.ERROR)
continue
except Exception as e:
logger.log(u'Error while searching ' + cur_provider.name + ', skipping: ' + ex(e), logger.ERROR)
logger.log('Error while searching %s, skipping: %s' % (cur_provider.name, ex(e)), logger.ERROR)
logger.log(traceback.format_exc(), logger.ERROR)
continue
finally:
threading.currentThread().name = orig_thread_name
# if they haven't been added by a different provider than add the proper to the list
count = 0
for x in found_propers:
name = _generic_name(x.name)
if name not in propers:
try:
np = NameParser(False, try_scene_exceptions=True, showObj=x.parsed_show, indexer_lookup=False)
parse_result = np.parse(x.name)
if parse_result.series_name and parse_result.episode_numbers and \
(parse_result.show.indexer, parse_result.show.indexerid) in recent_shows + recent_anime:
cur_size = getattr(x, 'size', None)
if failed_history.has_failed(x.name, cur_size, cur_provider.name):
continue
logger.log(u'Found new proper: ' + x.name, logger.DEBUG)
x.show = parse_result.show.indexerid
x.provider = cur_provider
x.is_repack, x.properlevel = Quality.get_proper_level(parse_result.extra_info_no_name(),
parse_result.version,
parse_result.is_anime,
check_is_repack=True)
x.is_internal = parse_result.extra_info_no_name() and \
re.search(r'\binternal\b', parse_result.extra_info_no_name(), flags=re.I)
x.codec = _get_codec(parse_result.extra_info_no_name())
propers[name] = x
count += 1
except (InvalidNameException, InvalidShowException):
continue
except (StandardError, Exception):
continue
# if they haven't been added by a different provider than add the Proper to the list
for cur_proper in found_propers:
name = _generic_name(cur_proper.name)
if name in propers:
continue
cur_provider.log_result('Propers', count, '%s' % cur_provider.name)
try:
np = NameParser(False, try_scene_exceptions=True, showObj=cur_proper.parsed_show, indexer_lookup=False)
parse_result = np.parse(cur_proper.name)
except (InvalidNameException, InvalidShowException, Exception):
continue
# take the list of unique propers and get it sorted by
sorted_propers = sorted(propers.values(), key=operator.attrgetter('properlevel', 'date'), reverse=True)
verified_propers = set()
# get the show object
cur_proper.parsed_show = (cur_proper.parsed_show
or helpers.findCertainShow(sickbeard.showList, parse_result.show.indexerid))
if None is cur_proper.parsed_show:
logger.log('Skip download; cannot find show with indexerid [%s]' % cur_proper.indexerid, logger.ERROR)
continue
for cur_proper in sorted_propers:
cur_proper.indexer = cur_proper.parsed_show.indexer
cur_proper.indexerid = cur_proper.parsed_show.indexerid
np = NameParser(False, try_scene_exceptions=True, showObj=cur_proper.parsed_show, indexer_lookup=False)
try:
parse_result = np.parse(cur_proper.name)
except (StandardError, Exception):
continue
if not (-1 != cur_proper.indexerid and parse_result.series_name and parse_result.episode_numbers
and (cur_proper.indexer, cur_proper.indexerid) in recent_shows + recent_anime):
continue
# set the indexerid in the db to the show's indexerid
cur_proper.indexerid = parse_result.show.indexerid
# set the indexer in the db to the show's indexer
cur_proper.indexer = parse_result.show.indexer
# populate our Proper instance
cur_proper.season = parse_result.season_number if None is not parse_result.season_number else 1
cur_proper.episode = parse_result.episode_numbers[0]
cur_proper.release_group = parse_result.release_group
cur_proper.version = parse_result.version
cur_proper.extra_info = parse_result.extra_info
cur_proper.extra_info_no_name = parse_result.extra_info_no_name
cur_proper.quality = Quality.nameQuality(cur_proper.name, parse_result.is_anime)
cur_proper.is_anime = parse_result.is_anime
# only get anime proper if it has release group and version
if parse_result.is_anime:
if not cur_proper.release_group and -1 == cur_proper.version:
logger.log(u'Proper %s doesn\'t have a release group and version, ignoring it' % cur_proper.name,
# only get anime Proper if it has release group and version
if parse_result.is_anime and not parse_result.release_group and -1 == parse_result.version:
logger.log('Ignored Proper with no release group and version in name [%s]' % cur_proper.name,
logger.DEBUG)
continue
if not show_name_helpers.pass_wordlist_checks(cur_proper.name, parse=False, indexer_lookup=False):
logger.log(u'Proper %s isn\'t a valid scene release that we want, ignoring it' % cur_proper.name,
logger.DEBUG)
continue
re_extras = dict(re_prefix='.*', re_suffix='.*')
result = show_name_helpers.contains_any(cur_proper.name, parse_result.show.rls_ignore_words, **re_extras)
if None is not result and result:
logger.log(u'Ignored: %s for containing ignore word' % cur_proper.name)
continue
result = show_name_helpers.contains_any(cur_proper.name, parse_result.show.rls_require_words, **re_extras)
if None is not result and not result:
logger.log(u'Ignored: %s for not containing any required word match' % cur_proper.name)
continue
# check if we actually want this proper (if it's the right quality)
my_db = db.DBConnection()
sql_results = my_db.select(
'SELECT release_group, status, version, release_name FROM tv_episodes WHERE showid = ? AND indexer = ? ' +
'AND season = ? AND episode = ?',
[cur_proper.indexerid, cur_proper.indexer, cur_proper.season, cur_proper.episode])
if not sql_results:
continue
# only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones)
# don't take proper of the same level we already downloaded
old_status, old_quality = Quality.splitCompositeStatus(int(sql_results[0]['status']))
cur_proper.is_repack, cur_proper.proper_level = Quality.get_proper_level(cur_proper.extra_info_no_name(),
cur_proper.version,
cur_proper.is_anime,
check_is_repack=True)
old_release_group = sql_results[0]['release_group']
# check if we want this release: same quality as current, current has correct status
# restrict other release group releases to proper's
if old_status not in SNATCHED_ANY + [DOWNLOADED, ARCHIVED] \
or cur_proper.quality != old_quality \
or (cur_proper.is_repack and cur_proper.release_group != old_release_group):
continue
np = NameParser(False, try_scene_exceptions=True, showObj=parse_result.show, indexer_lookup=False)
try:
extra_info = np.parse(sql_results[0]['release_name']).extra_info_no_name()
except (StandardError, Exception):
extra_info = None
old_proper_level, old_is_internal, old_codec, old_extra_no_name, old_name = \
get_old_proper_level(parse_result.show, cur_proper.indexer, cur_proper.indexerid, cur_proper.season,
parse_result.episode_numbers, old_status, cur_proper.quality, extra_info,
cur_proper.version, cur_proper.is_anime)
old_name = (old_name, sql_results[0]['release_name'])[old_name in ('', None)]
if cur_proper.proper_level < old_proper_level:
continue
elif cur_proper.proper_level == old_proper_level:
if '264' == cur_proper.codec and 'xvid' == old_codec:
pass
elif old_is_internal and not cur_proper.is_internal:
pass
else:
if not show_name_helpers.pass_wordlist_checks(cur_proper.name, parse=False, indexer_lookup=False):
logger.log('Ignored unwanted Proper [%s]' % cur_proper.name, logger.DEBUG)
continue
log_same_grp = 'Skipping proper from release group: [%s], does not match existing release group: [%s] for [%s]'\
% (cur_proper.release_group, old_release_group, cur_proper.name)
is_web = (old_quality in (Quality.HDWEBDL, Quality.FULLHDWEBDL, Quality.UHD4KWEB) or
(old_quality == Quality.SDTV and re.search(r'\Wweb.?(dl|rip|.[hx]26[45])\W',
str(sql_results[0]['release_name']), re.I)))
if is_web:
old_webdl_type = get_webdl_type(old_extra_no_name, old_name)
new_webdl_type = get_webdl_type(cur_proper.extra_info_no_name(), cur_proper.name)
if old_webdl_type != new_webdl_type:
logger.log('Skipping proper webdl source: [%s], does not match existing webdl source: [%s] for [%s]'
% (old_webdl_type, new_webdl_type, cur_proper.name), logger.DEBUG)
re_x = dict(re_prefix='.*', re_suffix='.*')
result = show_name_helpers.contains_any(cur_proper.name, cur_proper.parsed_show.rls_ignore_words, **re_x)
if None is not result and result:
logger.log('Ignored Proper containing ignore word [%s]' % cur_proper.name, logger.DEBUG)
continue
# for webldls, prevent propers from different groups
if sickbeard.PROPERS_WEBDL_ONEGRP and is_web and cur_proper.release_group != old_release_group:
logger.log(log_same_grp, logger.DEBUG)
continue
# check if we actually want this proper (if it's the right release group and a higher version)
if parse_result.is_anime:
old_version = int(sql_results[0]['version'])
if -1 < old_version < cur_proper.version:
logger.log(u'Found new anime v%s to replace existing v%s' % (cur_proper.version, old_version))
else:
result = show_name_helpers.contains_any(cur_proper.name, cur_proper.parsed_show.rls_require_words, **re_x)
if None is not result and not result:
logger.log('Ignored Proper for not containing any required word [%s]' % cur_proper.name, logger.DEBUG)
continue
if cur_proper.release_group != old_release_group:
cur_size = getattr(cur_proper, 'size', None)
if failed_history.has_failed(cur_proper.name, cur_size, cur_provider.name):
continue
cur_proper.season = parse_result.season_number if None is not parse_result.season_number else 1
cur_proper.episode = parse_result.episode_numbers[0]
# check if we actually want this Proper (if it's the right quality)
sql_results = my_db.select(
'SELECT release_group, status, version, release_name'
' FROM tv_episodes'
' WHERE showid = ? AND indexer = ? AND season = ? AND episode = ?'
' LIMIT 1',
[cur_proper.indexerid, cur_proper.indexer, cur_proper.season, cur_proper.episode])
if not sql_results:
continue
# only keep the Proper if we already retrieved the same quality ep (don't get better/worse ones)
# check if we want this release: same quality as current, current has correct status
# restrict other release group releases to Proper's
old_status, old_quality = Quality.splitCompositeStatus(int(sql_results[0]['status']))
cur_proper.quality = Quality.nameQuality(cur_proper.name, parse_result.is_anime)
cur_proper.is_repack, cur_proper.properlevel = Quality.get_proper_level(
parse_result.extra_info_no_name(), parse_result.version, parse_result.is_anime, check_is_repack=True)
cur_proper.proper_level = cur_proper.properlevel # local non global value
old_release_group = sql_results[0]['release_group']
same_release_group = parse_result.release_group == old_release_group
if old_status not in SNATCHED_ANY + [DOWNLOADED, ARCHIVED] \
or cur_proper.quality != old_quality \
or (cur_proper.is_repack and not same_release_group):
continue
np = NameParser(False, try_scene_exceptions=True, showObj=cur_proper.parsed_show, indexer_lookup=False)
try:
extra_info = np.parse(sql_results[0]['release_name']).extra_info_no_name()
except (StandardError, Exception):
extra_info = None
# don't take Proper of the same level we already downloaded
old_proper_level, old_is_internal, old_codec, old_extra_no_name, old_name = \
get_old_proper_level(cur_proper.parsed_show, cur_proper.indexer, cur_proper.indexerid,
cur_proper.season, parse_result.episode_numbers,
old_status, cur_proper.quality, extra_info,
parse_result.version, parse_result.is_anime)
cur_proper.codec = _get_codec(parse_result.extra_info_no_name())
if cur_proper.proper_level < old_proper_level:
continue
cur_proper.is_internal = (parse_result.extra_info_no_name() and
re.search(r'\binternal\b', parse_result.extra_info_no_name(), flags=re.I))
if cur_proper.proper_level == old_proper_level:
if (('264' == cur_proper.codec and 'xvid' == old_codec)
or (old_is_internal and not cur_proper.is_internal)):
pass
continue
is_web = (old_quality in (Quality.HDWEBDL, Quality.FULLHDWEBDL, Quality.UHD4KWEB) or
(old_quality == Quality.SDTV and re.search(r'\Wweb.?(dl|rip|.[hx]26[45])\W',
str(sql_results[0]['release_name']), re.I)))
if is_web:
old_name = (old_name, sql_results[0]['release_name'])[old_name in ('', None)]
old_webdl_type = get_webdl_type(old_extra_no_name, old_name)
new_webdl_type = get_webdl_type(parse_result.extra_info_no_name(), cur_proper.name)
if old_webdl_type != new_webdl_type:
logger.log('Ignored Proper webdl source [%s], does not match existing webdl source [%s] for [%s]'
% (old_webdl_type, new_webdl_type, cur_proper.name), logger.DEBUG)
continue
# for webdls, prevent Propers from different groups
log_same_grp = 'Ignored Proper from release group [%s] does not match existing group [%s] for [%s]' \
% (parse_result.release_group, old_release_group, cur_proper.name)
if sickbeard.PROPERS_WEBDL_ONEGRP and is_web and not same_release_group:
logger.log(log_same_grp, logger.DEBUG)
continue
# if the show is in our list and there hasn't been a proper already added for that particular episode
# then add it to our list of propers
if cur_proper.indexerid != -1:
if (cur_proper.indexerid, cur_proper.indexer, cur_proper.season, cur_proper.episode) not in map(
operator.attrgetter('indexerid', 'indexer', 'season', 'episode'), verified_propers):
logger.log(u'Found a proper that may be useful: %s' % cur_proper.name)
verified_propers.add(cur_proper)
# check if we actually want this Proper (if it's the right release group and a higher version)
if parse_result.is_anime:
old_version = int(sql_results[0]['version'])
if not (-1 < old_version < parse_result.version):
continue
if not same_release_group:
logger.log(log_same_grp, logger.DEBUG)
continue
found_msg = 'Found anime Proper v%s to replace v%s' % (parse_result.version, old_version)
else:
rp = set()
for vp in verified_propers:
if vp.indexer == cur_proper.indexer and vp.indexerid == cur_proper.indexerid and \
vp.season == cur_proper.season and vp.episode == cur_proper.episode and \
vp.proper_level < cur_proper.proper_level:
rp.add(vp)
if rp:
verified_propers = verified_propers - rp
logger.log(u'Found a proper that may be useful: %s' % cur_proper.name)
verified_propers.add(cur_proper)
found_msg = 'Found Proper [%s]' % cur_proper.name
return list(verified_propers)
# make sure the episode has been downloaded before
history_limit = datetime.datetime.today() - datetime.timedelta(days=30)
history_results = my_db.select(
'SELECT resource FROM history'
' WHERE showid = ?'
' AND season = ? AND episode = ? AND quality = ? AND date >= ?'
' AND (%s)' % ' OR '.join('action LIKE "%%%02d"' % x for x in SNATCHED_ANY + [DOWNLOADED, ARCHIVED]),
[cur_proper.indexerid,
cur_proper.season, cur_proper.episode, cur_proper.quality,
history_limit.strftime(history.dateFormat)])
def _download_propers(proper_list):
for cur_proper in proper_list:
history_limit = datetime.datetime.today() - datetime.timedelta(days=30)
# make sure the episode has been downloaded before
my_db = db.DBConnection()
history_results = my_db.select(
'SELECT resource FROM history ' +
'WHERE showid = ? AND season = ? AND episode = ? AND quality = ? AND date >= ? ' +
'AND (' + ' OR '.join("action LIKE '%%%02d'" % x for x in SNATCHED_ANY + [DOWNLOADED, ARCHIVED]) + ')',
[cur_proper.indexerid, cur_proper.season, cur_proper.episode, cur_proper.quality,
history_limit.strftime(history.dateFormat)])
# if we didn't download this episode in the first place we don't know what quality to use for the proper = skip
if 0 == len(history_results):
logger.log(u'Skipping download because cannot find an original history entry for proper ' + cur_proper.name)
continue
else:
# get the show object
show_obj = helpers.findCertainShow(sickbeard.showList, cur_proper.indexerid)
if None is show_obj:
logger.log(u'Unable to find the show with indexerid ' + str(
cur_proper.indexerid) + ' so unable to download the proper', logger.ERROR)
# skip if the episode has never downloaded, because a previous quality is required to match the Proper
if not len(history_results):
logger.log('Ignored Proper cannot find a recent history item for [%s]' % cur_proper.name, logger.DEBUG)
continue
# make sure that none of the existing history downloads are the same proper we're trying to download
clean_proper_name = _generic_name(helpers.remove_non_release_groups(cur_proper.name, show_obj.is_anime))
# make sure that none of the existing history downloads are the same Proper as the download candidate
clean_proper_name = _generic_name(helpers.remove_non_release_groups(
cur_proper.name, cur_proper.parsed_show.is_anime))
is_same = False
for result in history_results:
for hitem in history_results:
# if the result exists in history already we need to skip it
if clean_proper_name == _generic_name(helpers.remove_non_release_groups(
ek.ek(os.path.basename, result['resource']))):
ek.ek(os.path.basename, hitem['resource']))):
is_same = True
break
if is_same:
logger.log(u'This proper is already in history, skipping it', logger.DEBUG)
logger.log('Ignored Proper already in history [%s]' % cur_proper.name)
continue
ep_obj = show_obj.getEpisode(cur_proper.season, cur_proper.episode)
logger.log(found_msg, logger.DEBUG)
# finish populating the Proper instance
# cur_proper.show = cur_proper.parsed_show.indexerid
cur_proper.provider = cur_provider
cur_proper.extra_info = parse_result.extra_info
cur_proper.extra_info_no_name = parse_result.extra_info_no_name
cur_proper.release_group = parse_result.release_group
cur_proper.is_anime = parse_result.is_anime
cur_proper.version = parse_result.version
propers[name] = cur_proper
cur_provider.log_result('Propers', len(propers), '%s' % cur_provider.name)
return propers.values()
def _download_propers(proper_list):
verified_propers = True
consumed_proper = []
downloaded_epid = set()
_epid = operator.attrgetter('indexerid', 'indexer', 'season', 'episode')
while verified_propers:
verified_propers = set()
# get verified list; sort the list of unique Propers for highest proper_level, newest first
for cur_proper in sorted(
filter(lambda p: p not in consumed_proper,
# allows Proper to fail or be rejected and another to be tried (with a different name)
filter(lambda p: _epid(p) not in downloaded_epid, proper_list)),
key=operator.attrgetter('properlevel', 'date'), reverse=True):
epid = _epid(cur_proper)
# if the show is in our list and there hasn't been a Proper already added for that particular episode
# then add it to our list of Propers
if epid not in map(_epid, verified_propers):
logger.log('Proper may be useful [%s]' % cur_proper.name)
verified_propers.add(cur_proper)
else:
# use Proper with the highest level
remove_propers = set()
map(lambda vp: remove_propers.add(vp),
filter(lambda p: (epid == _epid(p) and cur_proper.proper_level > p.proper_level), verified_propers))
if remove_propers:
verified_propers -= remove_propers
logger.log('A more useful Proper [%s]' % cur_proper.name)
verified_propers.add(cur_proper)
for cur_proper in list(verified_propers):
consumed_proper += [cur_proper]
# scene release checking
scene_only = getattr(cur_proper.provider, 'scene_only', False)
scene_rej_nuked = getattr(cur_proper.provider, 'scene_rej_nuked', False)
if any([scene_only, scene_rej_nuked]) and not cur_proper.parsed_show.is_anime:
scene_or_contain = getattr(cur_proper.provider, 'scene_or_contain', '')
scene_contains = False
if scene_only and scene_or_contain:
re_extras = dict(re_prefix='.*', re_suffix='.*')
r = show_name_helpers.contains_any(cur_proper.name, scene_or_contain, **re_extras)
if None is not r and r:
scene_contains = True
if scene_contains and not scene_rej_nuked:
reject = False
else:
reject, url = search.can_reject(cur_proper.name)
if reject:
if isinstance(reject, basestring):
if scene_rej_nuked:
logger.log('Rejecting nuked release. Nuke reason [%s] source [%s]' % (reject, url),
logger.DEBUG)
else:
logger.log('Considering nuked release. Nuke reason [%s] source [%s]' % (reject, url),
logger.DEBUG)
reject = False
elif scene_contains:
reject = False
else:
logger.log('Rejecting as not scene release listed at any [%s]' % url, logger.DEBUG)
if reject:
continue
# make the result object
ep_obj = cur_proper.parsed_show.getEpisode(cur_proper.season, cur_proper.episode)
result = cur_proper.provider.get_result([ep_obj], cur_proper.url)
if None is result:
continue
@ -450,7 +476,8 @@ def _download_propers(proper_list):
result.puid = cur_proper.puid
# snatch it
search.snatch_episode(result, SNATCHED_PROPER)
if search.snatch_episode(result, SNATCHED_PROPER):
downloaded_epid.add(_epid(cur_proper))
def get_needed_qualites(needed=None):

View file

@ -17,81 +17,38 @@
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
from os import sys
import importlib
import os.path
import sickbeard
from . import generic
from . import generic, newznab
from .newznab import NewznabConstants
from sickbeard import logger, encodingKludge as ek
# usenet
from . import newznab, omgwtfnzbs
# torrent
from . import alpharatio, alphareign, beyondhd, bithdtv, bitmetv, blutopia, btn, btscene, dh, ettv, eztv, \
fano, filelist, funfile, grabtheinfo, hdbits, hdspace, hdtorrents, \
iptorrents, limetorrents, magnetdl, morethan, nebulance, ncore, nyaa, pisexy, potuk, pretome, privatehd, ptf, \
rarbg, revtt, scenehd, scenetime, shazbat, showrss, skytorrents, speedcd, \
thepiratebay, torlock, torrentday, torrenting, torrentleech, \
torrentz2, tvchaosuk, wop, zooqle
# anime
from . import anizb, tokyotoshokan
# custom
try:
from . import custom01
except (StandardError, Exception):
pass
__all__ = ['omgwtfnzbs',
'alpharatio',
'alphareign',
'anizb',
'beyondhd',
'bithdtv',
'bitmetv',
'blutopia',
'btn',
'btscene',
'custom01',
'dh',
'ettv',
'eztv',
'fano',
'filelist',
'funfile',
'grabtheinfo',
'hdbits',
'hdspace',
'hdtorrents',
'iptorrents',
'limetorrents',
'magnetdl',
'morethan',
'nebulance',
'ncore',
'nyaa',
'pisexy',
'potuk',
'pretome',
'privatehd',
'ptf',
'rarbg',
'revtt',
'scenehd',
'scenetime',
'shazbat',
'showrss',
'skytorrents',
'speedcd',
'thepiratebay',
'torlock',
'torrentday',
'torrenting',
'torrentleech',
'torrentz2',
'tvchaosuk',
'wop',
'zooqle',
'tokyotoshokan',
]
__all__ = [
# usenet
'omgwtfnzbs',
# torrent
'alpharatio', 'alphareign', 'beyondhd', 'bithdtv', 'bitmetv', 'blutopia', 'btn', 'btscene',
'custom01', 'custom11', 'dh', 'ettv', 'eztv', 'fano', 'filelist', 'funfile', 'grabtheinfo',
'hdbits', 'hdme', 'hdspace', 'hdtorrents', 'horriblesubs',
'immortalseed', 'iptorrents', 'limetorrents', 'magnetdl', 'morethan', 'nebulance', 'ncore', 'nyaa',
'pisexy', 'potuk', 'pretome', 'privatehd', 'ptf',
'rarbg', 'revtt', 'scenehd', 'scenetime', 'shazbat', 'showrss', 'skytorrents', 'speedcd',
'thepiratebay', 'torlock', 'torrentday', 'torrenting', 'torrentleech', 'torrentz2', 'tvchaosuk',
'wop', 'xspeeds', 'zooqle',
# anime
'anizb', 'tokyotoshokan',
]
for module in __all__:
try:
m = importlib.import_module('.' + module, 'sickbeard.providers')
globals().update({n: getattr(m, n) for n in m.__all__} if hasattr(m, '__all__')
else dict(filter(lambda t: '_' != t[0][0], m.__dict__.items())))
except ImportError as e:
if 'custom' != module[0:6]:
raise e
def sortedProviderList():
@ -147,14 +104,10 @@ def getNewznabProviderList(data):
providerList.append(curDefault)
else:
providerDict[curDefault.name].default = True
providerDict[curDefault.name].name = curDefault.name
providerDict[curDefault.name].url = curDefault.url
providerDict[curDefault.name].needs_auth = curDefault.needs_auth
providerDict[curDefault.name].search_mode = curDefault.search_mode
providerDict[curDefault.name].search_fallback = curDefault.search_fallback
providerDict[curDefault.name].enable_recentsearch = curDefault.enable_recentsearch
providerDict[curDefault.name].enable_backlog = curDefault.enable_backlog
providerDict[curDefault.name].enable_scheduled_backlog = curDefault.enable_scheduled_backlog
for k in ('name', 'url', 'needs_auth', 'search_mode', 'search_fallback',
'enable_recentsearch', 'enable_backlog', 'enable_scheduled_backlog',
'server_type'):
setattr(providerDict[curDefault.name], k, getattr(curDefault, k))
return filter(lambda x: x, providerList)
@ -163,34 +116,24 @@ def makeNewznabProvider(configString):
if not configString:
return None
search_mode = 'eponly'
search_fallback = 0
enable_recentsearch = 0
enable_backlog = 0
enable_scheduled_backlog = 1
try:
values = configString.split('|')
if len(values) == 10:
name, url, key, cat_ids, enabled, search_mode, search_fallback, enable_recentsearch, enable_backlog, \
enable_scheduled_backlog = values
elif len(values) == 9:
name, url, key, cat_ids, enabled, search_mode, search_fallback, enable_recentsearch, enable_backlog = values
else:
name = values[0]
url = values[1]
key = values[2]
cat_ids = values[3]
enabled = values[4]
except ValueError:
logger.log(u"Skipping Newznab provider string: '" + configString + "', incorrect format", logger.ERROR)
values = configString.split('|')
if 5 <= len(values):
name, url, enabled = values.pop(0), values.pop(0), values.pop(4-2)
params = dict()
for k, d in (('key', ''), ('cat_ids', ''), ('search_mode', 'eponly'), ('search_fallback', 0),
('enable_recentsearch', 0), ('enable_backlog', 0), ('enable_scheduled_backlog', 1),
('server_type', NewznabConstants.SERVER_DEFAULT)):
try:
params.update({k: values.pop(0)})
except IndexError:
params.update({k: d})
else:
logger.log(u'Skipping Newznab provider string: \'%s\', incorrect format' % configString, logger.ERROR)
return None
newznab = sys.modules['sickbeard.providers.newznab']
newProvider = newznab.NewznabProvider(name, url, key=key, cat_ids=cat_ids, search_mode=search_mode,
search_fallback=search_fallback, enable_recentsearch=enable_recentsearch,
enable_backlog=enable_backlog, enable_scheduled_backlog=enable_scheduled_backlog)
newProvider = newznab.NewznabProvider(name, url, **params)
newProvider.enabled = enabled == '1'
return newProvider

View file

@ -37,36 +37,6 @@ class BTSceneProvider(generic.TorrentProvider):
'z Rn Y', 'uVv2vY', '1 5vSZ', 'sJ omb', 'rNov2b', 'uQoWvZ', '0FvoGb']],
[re.sub('[v\sp]+', '', x[::-1]) for x in [
'zRnp Y', 'upVp2Y', '15SvpZ', 'spJpmb', 'r N 2b', 'u QvWZ', '=Mvm d']],
[re.sub('[P\sh]+', '', x[::-1]) for x in [
'zh RnY', 'uV2 Y', '1P5ShZ', 's Jm b', 'rN2h b', 'uQPW Z', '=ghXPb']],
[re.sub('[g\si]+', '', x[::-1]) for x in [
'zRiniY', 'uVgg2Y', '1 i5SZ', 'sJiimb', 'rN2igb', 'u IX Z', 'ul 2d']],
[re.sub('[O\sp]+', '', x[::-1]) for x in [
'zORnOY', 'uV2OpY', '15pSOZ', 'spJpmb', 'rN2O b', 'uOIXpZ', '=pM2OY']],
[re.sub('[ \sH]+', '', x[::-1]) for x in [
'zRHnHY', 'l H52b', '15HHSM', 'sJ mHb', 'rN 2 b', 'uQ WHZ', 's 9 Gb']],
[re.sub('[o\s ]+', '', x[::-1]) for x in [
'zRoonY', 'l5 2 b', '15ooSM', 'sJomob', 'rN2o b', 'uoQW Z', 'mRo3od']],
[re.sub('[0\sg]+', '', x[::-1]) for x in [
'zR0n0Y', 'l5 g2b', '1g5S M', 'sJm gb', 'rN0g2b', 'uQW 0Z', '=gMX b']],
[re.sub('[r\sj]+', '', x[::-1]) for x in [
'zR nrY', 'uVj2rY', 'ir 5SZ', 'hB Xre', 'lN j3c', 'vj 5CZ', '=jjcmc']],
[re.sub('[M\st]+', '', x[::-1]) for x in [
'z MRnY', 'uV2tMY', 'i5 StZ', 'hBtXte', 'lN3 tc', 'lMM5CZ', '== tQd']],
[re.sub('[K\so]+', '', x[::-1]) for x in [
'zR n Y', 'uV2 oY', 'i5 SZ', 'hBX oe', 'loNK3c', 'i 5CoZ', '=K=goe']],
[re.sub('[i\sP]+', '', x[::-1]) for x in [
'ctQiniY', 'mblNPP2', 'M 2YPtU', 'vJHPcPu', 'c z5PCe', 'QZj FPG', '=i =']],
[re.sub('[k\sq]+', '', x[::-1]) for x in [
'2Yzk RnY', '0k5qSZuV', 'WZyJ3qqb', 'p1m ke05', 'i cvJnkc', '=cmkckv5']],
[re.sub('[f\sG]+', '', x[::-1]) for x in [
'Rn Y', '2 fYz', 'Z fuV', 'sf 5S', 'RffXY', 'nGfLv', '3ffB']],
[re.sub('[t\sF]+', '', x[::-1]) for x in [
'zFtRnY', 'u V2Y', '65S tZ', '2NFG e', 'pdFnFL', '= =Fgb']],
[re.sub('[q\s ]+', '', x[::-1]) for x in [
't QqnY', 'l N2 c', 'tUmq b', 'uM2 Y', 'vl Wqd', 'hZ qmL', 'oRXqqa']],
[re.sub('[o\st]+', '', x[::-1]) for x in [
'YzRnooY', 'SoZu V2', 'Jmtb 15', 'rN 2bs', 'Ls xWtY', 'wZyt 9m', '= t=']],
]]]
self.url_vars = {'search': '?q=%s&order=1', 'browse': 'lastdaycat/type/Series/',
'get': 'torrentdownload.php?id=%s'}

View file

@ -38,22 +38,6 @@ class EztvProvider(generic.TorrentProvider):
'0vp XZ', 'uvEj d', 'i5 Wzd', 'j9 vGb', 'kV2v a', '0zdvnL', '==vg Z']],
[re.sub('[f\sT]+', '', x[::-1]) for x in [
'0TpfXZ', 'ufTEjd', 'i5WTTd', 'j9f Gb', 'kV f2a', 'z1mTTL']],
[re.sub('[ \sR]+', '', x[::-1]) for x in [
'0pXRRZ', 'h 1id', 'w5yRRZ', '4 9 mc', 'w N nL', 'lNRW Y']],
[re.sub('[x\su]+', '', x[::-1]) for x in [
'dx0xpXZ', '3bx05xi', '5WZyxuJ', 'p1mexu0', 'c vuJnc', 'mcuuv5i', '= c']],
[re.sub('[T\sr]+', '', x[::-1]) for x in [
'XT Z', '0Trp', 'iTTd', 'sT 5', 'XTrY', 'vT R', 'nrrL', '3T B']],
[re.sub('[l\sT]+', '', x[::-1]) for x in [
'pX Z', 'idl 0', 'e6l 5', '2lTNG', 'd nTL', 'g blp', '= =']],
[re.sub('[T\sR]+', '', x[::-1]) for x in [
'0p X Z', 'h1iRRd', '15R yZ', 'u 8WRa', 'p RFmZ', '=gTGRd']],
[re.sub('[T\st]+', '', x[::-1]) for x in [
'0 ptXZ', '1T5i d', 'sTtJmb', 'rtN2Tb', 'sx WTY', 'ytT9mL', '=t=wTZ']],
[re.sub('[o\sz]+', '', x[::-1]) for x in [
'0zopXZ', '1z5oid', 'sJ mb', 'rNz2zb', 'uz QWZ', '0FGoob']],
[re.sub('[k\sv]+', '', x[::-1]) for x in [
'Xk Z', '0kkp', 'ivvd', 'y k5', 'WkvZ', '= Q']],
]]]
self.url_vars = {'search': 'search/%s', 'browse': 'page_%s'}
self.url_tmpl = {'config_provider_home_uri': '%(home)s',

View file

@ -21,6 +21,7 @@ from __future__ import with_statement
import datetime
import itertools
import json
import math
import os
import re
@ -30,7 +31,7 @@ import threading
import socket
from urllib import quote_plus
import zlib
from base64 import b16encode, b32decode
from base64 import b16encode, b32decode, b64decode
import sickbeard
import requests
@ -242,6 +243,13 @@ class GenericProvider(object):
self.fail_times = {1: (0, 15), 2: (0, 30), 3: (1, 0), 4: (2, 0), 5: (3, 0), 6: (6, 0), 7: (12, 0), 8: (24, 0)}
self._load_fail_values()
self.scene_only = False
self.scene_or_contain = ''
self.scene_loose = False
self.scene_loose_active = False
self.scene_rej_nuked = False
self.scene_nuked_active = False
def _load_fail_values(self):
if hasattr(sickbeard, 'DATA_DIR'):
my_db = db.DBConnection('cache.db')
@ -707,6 +715,16 @@ class GenericProvider(object):
except (StandardError, Exception):
logger.log(u'Failed to save magnet link to file, %s' % final_file)
elif not saved:
if 'torrent' == link_type and result.provider.get_id() in sickbeard.PROVIDER_HOMES:
# home var url can differ to current url if a url has changed, so exclude both on error
urls = list(set([sickbeard.PROVIDER_HOMES[result.provider.get_id()][0]]
+ re.findall('^(https?://[^/]+/)', result.url)
+ getattr(sickbeard, 'PROVIDER_EXCLUDE', [])))
sickbeard.PROVIDER_HOMES[result.provider.get_id()] = ('', None)
# noinspection PyProtectedMember
result.provider._valid_home(url_exclude=urls)
setattr(sickbeard, 'PROVIDER_EXCLUDE', ([], urls)[any([result.provider.url])])
logger.log(u'Server failed to return anything useful', logger.ERROR)
return saved
@ -1117,9 +1135,9 @@ class GenericProvider(object):
"""
if not self.should_skip():
str1, thing, str3 = (('', '%s item' % mode.lower(), ''), (' usable', 'proper', ' found'))['Propers' == mode]
logger.log(u'%s %s in response from %s' % (('No' + str1, count)[0 < count], (
logger.log((u'%s %s in response from %s' % (('No' + str1, count)[0 < count], (
'%s%s%s%s' % (('', 'freeleech ')[getattr(self, 'freeleech', False)], thing, maybe_plural(count), str3)),
re.sub('(\s)\s+', r'\1', url)))
re.sub('(\s)\s+', r'\1', url))).replace('%%', '%'))
def check_auth_cookie(self):
@ -1293,7 +1311,7 @@ class NZBProvider(GenericProvider):
class TorrentProvider(GenericProvider):
def __init__(self, name, supports_backlog=True, anime_only=False, cache_update_freq=None, update_freq=None):
def __init__(self, name, supports_backlog=True, anime_only=False, cache_update_freq=7, update_freq=None):
GenericProvider.__init__(self, name, supports_backlog, anime_only)
self.providerType = GenericProvider.TORRENT
@ -1439,7 +1457,58 @@ class TorrentProvider(GenericProvider):
return data and re.search(r'(?sim)<input[^<]+?name=["\'\s]*?password', data) and \
re.search(r'(?sim)<input[^<]+?name=["\'\s]*?username', data)
def _valid_home(self, attempt_fetch=True):
def _decode_urls(self, url_exclude=None):
    """Fetch and decode obfuscated provider home urls from the SickGear ext-data repo.

    The remote data maps an encoded module name to lists of encoded url chunks;
    both are XOR/hex encoded (see `_decode`) with a key derived from this
    provider module's own file content.

    :param url_exclude: urls known to be bad, filtered from the decoded result
    :return: list of candidate home urls (may be empty)
    """
    data_attr = 'PROVIDER_DATA'
    data_refresh = 'PROVIDER_DATA_REFRESH'
    obf = getattr(sickbeard, data_attr, None)
    now = int(time.time())
    data_window = getattr(sickbeard, data_refresh, now - 1)
    if data_window < now:
        # refresh the cached remote data at most once every 10 minutes
        setattr(sickbeard, data_refresh, (10*60) + now)
        url = 'https://raw.githubusercontent.com/SickGear/sickgear.extdata/master/SickGear/data.txt'
        obf_new = helpers.getURL(url, json=True) or {}
        if obf_new:
            setattr(sickbeard, data_attr, obf_new)
            obf = obf_new

    urls = []

    seen_attr = 'PROVIDER_SEEN'
    if obf and self.__module__ not in getattr(sickbeard, seen_attr, []):
        # decode key = crc32 of this module's source file, as a hex string
        # NOTE(review): str.encode('hex') is Python 2 only — this method is Py2-bound
        file_path = '%s.py' % os.path.join(sickbeard.PROG_DIR, *self.__module__.split('.'))
        if ek.ek(os.path.isfile, file_path):
            with open(file_path, 'rb') as file_hd:
                c = bytearray(str(zlib.crc32(file_hd.read())).encode('hex'))

            for x in obf.keys():
                if self.__module__.endswith(self._decode(bytearray(b64decode(x)), c)):
                    # each url chunk u is [noise-chars, part1, part2, ...]; strip the
                    # noise chars from each reversed part, join, then b64 + XOR decode
                    for u in obf[x]:
                        urls += [self._decode(bytearray(
                            b64decode(''.join([re.sub('[\s%s]+' % u[0], '', x[::-1]) for x in u[1:]]))), c)]
                    url_exclude = url_exclude or []
                    if url_exclude:
                        # presumably the first decoded url is the one that just failed — TODO confirm
                        urls = urls[1:]
                    urls = filter(lambda u: u not in url_exclude, urls)
                    break
            if not urls:
                # remember modules with no decodable urls so they are not retried
                setattr(sickbeard, seen_attr, list(set(getattr(sickbeard, seen_attr, []) + [self.__module__])))

    if not urls:
        # fall back to any plain-text urls hardcoded on the provider
        urls = filter(lambda u: 'http' in u, getattr(self, 'url_home', []))

    return urls
@staticmethod
def _decode(data, c):
    """XOR `data` with the repeating key `c`, then hex-decode pairs into text.

    NOTE(review): relies on Python 2 semantics where str(bytearray) yields the
    raw byte content (on Py3 it would yield a repr and fail into the fallback).

    :return: decoded string, or '|' on any failure (a value that matches nothing)
    """
    try:
        # each XOR'd pair of hex chars becomes one output character
        result = ''.join(chr(int(str(
            bytearray((8 * c)[i] ^ x for i, x in enumerate(data))[i:i + 2]), 16)) for i in range(0, len(data), 2))
    except (StandardError, Exception):
        result = '|'
    return result
def _valid_home(self, attempt_fetch=True, url_exclude=None):
"""
:return: signature verified home url else None if validation fail
"""
@ -1447,13 +1516,13 @@ class TorrentProvider(GenericProvider):
if url_base:
return url_base
url_list = getattr(self, 'url_home', None)
if not url_list and getattr(self, 'url_edit', None) or 10 > max([len(x) for x in url_list]):
url_list = self._decode_urls(url_exclude)
if not url_list and getattr(self, 'url_edit', None) or not any(filter(lambda u: 10 < len(u), url_list)):
return None
url_list = ['%s/' % x.rstrip('/') for x in url_list]
url_list = map(lambda u: '%s/' % u.rstrip('/'), url_list)
last_url, expire = sickbeard.PROVIDER_HOMES.get(self.get_id(), ('', None))
url_drop = getattr(self, 'url_drop', [])
url_drop = (url_exclude or []) + getattr(self, 'url_drop', [])
if url_drop and any([url in last_url for url in url_drop]): # deprecate url
last_url = ''

118
sickbeard/providers/hdme.py Normal file
View file

@ -0,0 +1,118 @@
# coding=utf-8
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
import re
import traceback
from . import generic
from sickbeard import logger
from sickbeard.bs4_parser import BS4Parser
from sickbeard.helpers import tryInt
from lib.unidecode import unidecode
class HDMEProvider(generic.TorrentProvider):
    """Torrent provider for the HDME private tracker.

    Scrapes the site's HTML browse pages; results feed the generic
    TorrentProvider search pipeline. Python 2 codebase (uses `unicode`,
    `StandardError`, and Py2 comprehension tuple syntax).
    """

    def __init__(self):
        generic.TorrentProvider.__init__(self, 'HDME')

        self.url_home = ['https://www.hdme.eu']

        # 'search' takes (search string, category string, incldead flag)
        self.url_vars = {'login_action': 'login.php', 'search': 'browse.php?search=%s&%s&incldead=%s'}
        self.url_tmpl = {'config_provider_home_uri': '%(home)s', 'login_action': '%(home)s%(vars)s',
                         'search': '%(home)s%(vars)s'}

        # site category ids; 'Cache' (recent search) covers season + episode cats
        self.categories = {'Season': [34], 'Episode': [38, 39]}
        self.categories['Cache'] = self.categories['Season'] + self.categories['Episode']

        self.username, self.password, self.freeleech, self.minseed, self.minleech = 5 * [None]

    def _authorised(self, **kwargs):
        # log in through the site's standard login form template
        return super(HDMEProvider, self)._authorised(post_params={'form_tmpl': True})

    def _search_provider(self, search_params, **kwargs):
        """Search the site; `search_params` maps mode -> list of search strings.

        :return: list of (title, url, seeders, size) items sorted by seeders
        """
        results = []
        if not self._authorised():
            return results

        items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}

        rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {
            'info': 'detail', 'get': 'download', 'fl': '\(Freeleech\)'}.items())
        for mode in search_params.keys():
            for search_string in search_params[mode]:
                search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
                # incldead=3 restricts results to freeleech when the option is set
                search_url = self.urls['search'] % (search_string, self._categories_string(mode),
                                                    ('3', '0')[not self.freeleech])

                html = self.get_url(search_url, timeout=90)
                if self.should_skip():
                    return results

                cnt = len(items[mode])
                try:
                    if not html or self._has_no_results(html):
                        raise generic.HaltParseException

                    # normalise the site's markup so the results table can be
                    # isolated and parsed as a single table with id="parse"
                    html = html.replace('<table width=100% border=0 align=center cellpadding=0 cellspacing=0>', '')
                    html = re.sub(r'(?s)(.*)(<table[^>]*?950[^>]*>.*)(</body>)', r'\1\3', html)
                    html = re.sub(r'(?s)<table[^>]+font[^>]+>', '<table id="parse">', html)
                    html = re.sub(r'(?s)(<td[^>]+>(?!<[ab]).*?)(?:(?:</[ab]>)+)', r'\1', html)
                    html = re.sub(r'(?m)^</td></tr></table>', r'', html)
                    with BS4Parser(html, features=['html5lib', 'permissive'], attr='id="parse"') as soup:
                        torrent_table = soup.find('table', id='parse')
                        torrent_rows = [] if not torrent_table else torrent_table.find_all('tr')

                        if 2 > len(torrent_rows):
                            raise generic.HaltParseException

                        head = None
                        for tr in torrent_rows[1:]:
                            cells = tr.find_all('td')
                            if 5 > len(cells):
                                continue
                            try:
                                # map column headings once, then read cells by name
                                head = head if None is not head else self._header_row(tr)
                                seeders, leechers, size = [tryInt(n, n) for n in [
                                    cells[head[x]].get_text().strip() for x in 'seed', 'leech', 'size']]
                                if self._peers_fail(mode, seeders, leechers):
                                    continue

                                info = tr.find('a', href=rc['info'])
                                title = (info.attrs.get('title') or info.get_text().split()[0]).strip()
                                download_url = self._link(tr.find('a', href=rc['get'])['href'])
                            except (AttributeError, TypeError, ValueError, KeyError):
                                continue

                            if title and download_url:
                                items[mode].append((title, download_url, seeders, self._bytesizer(size)))
                except generic.HaltParseException:
                    pass
                except (StandardError, Exception):
                    logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)

                self._log_search(mode, len(items[mode]) - cnt, search_url)

            results = self._sort_seeding(mode, results + items[mode])

        return results


provider = HDMEProvider()

View file

@ -0,0 +1,96 @@
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
import re
import traceback
import urllib
from . import generic
from sickbeard import logger, show_name_helpers
from sickbeard.bs4_parser import BS4Parser
from lib.unidecode import unidecode
class HorribleSubsProvider(generic.TorrentProvider):
    """Anime-only torrent provider for HorribleSubs (magnet links scraped from HTML)."""

    def __init__(self):
        generic.TorrentProvider.__init__(self, 'HorribleSubs', anime_only=True)

        self.url_base = 'http://horriblesubs.info/'
        self.urls = {'config_provider_home_uri': self.url_base,
                     'browse': self.url_base + 'lib/latest.php',
                     'search': self.url_base + 'lib/search.php?value=%s'}

        self.url = self.urls['config_provider_home_uri']

        # the site offers no season/backlog search, so strip the generic
        # search-mode options from this provider instance
        delattr(self, 'search_mode')
        delattr(self, 'search_fallback')

    def _search_provider(self, search_params, **kwargs):
        """Search the site; `search_params` maps mode -> list of search strings.

        :return: list of (title, url, seeders, size) items (seeders/size unavailable here)
        """
        results = []
        if self.show and not self.show.is_anime:
            return results

        items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}

        rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {
            'info': 'dl-label', 'get': 'magnet:', 'nodots': '[\.\s]+'}.items())
        for mode in search_params.keys():
            for search_string in search_params[mode]:
                search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
                # 'Cache' mode lists the latest releases; otherwise search with dots
                # and spaces collapsed to single spaces
                search_url = self.urls['browse'] if 'Cache' == mode else \
                    self.urls['search'] % rc['nodots'].sub(' ', search_string)

                html = self.get_url(search_url)
                if self.should_skip():
                    return results

                cnt = len(items[mode])
                try:
                    if not html or self._has_no_results(html):
                        raise generic.HaltParseException

                    with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
                        torrent_rows = soup.find_all('table', class_='release-table')

                        if 1 > len(torrent_rows):
                            raise generic.HaltParseException

                        for tr in torrent_rows:
                            if 4 < len(tr.find_all('td')):
                                try:
                                    title = tr.find('td', class_='dl-label').get_text().strip()
                                    # ensure a release group tag prefix on the name
                                    title = title.startswith('[') and title or '[HorribleSubs] %s' % title
                                    download_url = self._link(tr.find('a', href=rc['get'])['href'])
                                    if title and download_url:
                                        # feed exposes no seeders or size info
                                        items[mode].append((title, download_url, '', ''))
                                except (AttributeError, TypeError, ValueError):
                                    continue
                except generic.HaltParseException:
                    pass
                except (StandardError, Exception):
                    logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)

                self._log_search(mode, len(items[mode]) - cnt, search_url)

            results = self._sort_seeding(mode, results + items[mode])

        return results


provider = HorribleSubsProvider()

View file

@ -0,0 +1,113 @@
# coding=utf-8
#
# Author: SickGear
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
import re
import time
from . import generic
from sickbeard.helpers import tryInt
from lib.unidecode import unidecode
import feedparser
import sickbeard
class ImmortalSeedProvider(generic.TorrentProvider):
def __init__(self):
generic.TorrentProvider.__init__(self, 'ImmortalSeed')
self.url_base = 'https://immortalseed.me/'
self.urls = {'config_provider_home_uri': self.url_base,
'search': self.url_base + 'rss.php?feedtype=download&timezone=0&showrows=100'
'&%s&categories=%s&incl=%s'}
self.categories = {'Season': [6, 4], 'Episode': [8, 48, 9], 'anime': [32]}
self.categories['Cache'] = self.categories['Season'] + self.categories['Episode']
self.url = self.urls['config_provider_home_uri']
self.api_key, self.minseed, self.minleech = 3 * [None]
def _check_auth(self, **kwargs):
try:
secret_key = 'secret_key=' + re.split('secret_key\s*=\s*([0-9a-zA-Z]+)', self.api_key)[1]
except (StandardError, Exception):
raise sickbeard.exceptions.AuthException('Invalid secret key for %s in Media Providers/Options' % self.name)
if secret_key != self.api_key:
self.api_key = secret_key
sickbeard.save_config()
return True
def _search_provider(self, search_params, **kwargs):
results = []
items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}
rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {
'seed': 'seed[^\d/]+([\d]+)', 'leech': 'leech[^\d/]+([\d]+)',
'size': 'size[^\d/]+([^/]+)', 'get': '(.*download.*)', 'title': 'NUKED\b\.(.*)$'}.items())
for mode in search_params.keys():
for search_string in search_params[mode]:
search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
search_string = search_string.replace(' ', '.')
search_url = self.urls['search'] % (
self.api_key, self._categories_string(mode, template='%s', delimiter=','), search_string)
resp = self.get_url(search_url)
if self.should_skip():
return results
data = feedparser.parse(resp)
tr = data and data.get('entries', []) or []
cnt = len(items[mode])
for item in tr:
try:
seeders, leechers, size = [tryInt(n, n) for n in [
rc[x].findall(item.summary)[0].strip() for x in 'seed', 'leech', 'size']]
if self._peers_fail(mode, seeders, leechers):
continue
title = rc['title'].sub(r'\1', item.title.strip())
download_url = self._link(rc['get'].findall(getattr(item, 'link', ''))[0])
except (StandardError, Exception):
continue
if download_url and title:
items[mode].append((title, download_url, seeders, self._bytesizer(size)))
time.sleep(1.1)
self._log_search(mode, len(items[mode]) - cnt, search_url)
results = self._sort_seeding(mode, results + items[mode])
return results
def ui_string(self, key):
return ('%s_api_key' % self.get_id()) == key and 'Secret key' or \
('%s_api_key_tip' % self.get_id()) == key and \
'\'secret_key=\' from the <a href="%sgetrss.php">generated RSS link</a> at %s' % \
(self.url_base, self.name) or ''
provider = ImmortalSeedProvider()

View file

@ -31,23 +31,12 @@ class IPTorrentsProvider(generic.TorrentProvider):
def __init__(self):
generic.TorrentProvider.__init__(self, 'IPTorrents')
self.url_home = (['https://iptorrents.%s/' % u for u in 'eu', 'com', 'me', 'ru'] +
['http://rss.workisboring.com/'] +
self.url_home = (['https://iptorrents.com/'] +
[base64.b64decode(x) for x in [''.join(x) for x in [
[re.sub('(?i)[q\s1]+', '', x[::-1]) for x in [
'c0RHa', 'vo1QD', 'hJ2L', 'GdhdXe', 'vdnLoN', 'J21cptmc', '5yZulmcv', '02bj', '=iq=']],
[re.sub('(?i)[q\seg]+', '', x[::-1]) for x in [
'RqHEa', 'LvEoDc0', 'Zvex2', 'LuF2', 'NXdu Vn', 'XZwQxeWY1', 'Yu42bzJ', 'tgG92']],
[re.sub('(?i)[q\sek]+', '', x[::-1]) for x in [
'H qa', 'vQoDc0R', '2L ', 'bod', 'hNmLk0N3', 'WLlxemY', 'LtVGZv1', 'wZy9m', '=kQ=']],
[re.sub('(?i)[q\seg1]+', '', x[::-1]) for x in [
'HGa', 'voDc0R', '21L', 'bucmbvt', 'ZyZWQ1L0Vm', 'ycrFW', '02bej5', 'e=gq']],
[re.sub('(?i)[q\sei]+', '', x[::-1]) for x in [
'Q0RHa', 'voiQDc', 'asF2L', 'hVmLuVW', 'yZulGd', 'mbhdmcv1', 'Adl5mLjl', '==Qe']],
[re.sub('[r\sh]+', '', x[::-1]) for x in [
'fzRh3re', 'ChdwhlW', 'FW Zyh5', 'vJWhrLk', 'Lhz t2b', 'wZyhh9m', '=rr=']],
[re.sub('[S\sN]+', '', x[::-1]) for x in [
'zSSR3e', 'wNlWNf', 'zN 5Cd', '2SNJXZ', 'ySNAXZ', 'j5SSCc', '=S02 b']],
]]])
self.url_vars = {'login': 't', 'search': 't?%s;q=%s;qf=ti%s%s#torrents'}

View file

@ -38,34 +38,6 @@ class LimeTorrentsProvider(generic.TorrentProvider):
'XZFtlpGb', 'lJn pcvR', 'nFLpzRnb', 'v xpmYuV', 'CZlt F2Y', '=F QXYs5']],
[re.sub('[K\sP]+', '', x[::-1]) for x in [
'XZKtPlGb', 'lJncPPvR', 'nKLzRnKb', 'vxm Y uV', 'CZlPt2PY', '==wYK2P5']],
[re.sub('[i\sQ]+', '', x[::-1]) for x in [
'X ZtlGQb', 'l Jn cvR', 'nLzQQRnb', 'vxmQYuiV', 'CZQlt2iY', '=ii=Aet5']],
[re.sub('[q\sX]+', '', x[::-1]) for x in [
't lGqb', 'uXETqZ', 'i5WqXd', 'j 9Gqb', 'kqV2Xa', 'z1qm L']],
[re.sub('[w\sF]+', '', x[::-1]) for x in [
'twlGFb', 'uEF TZ', 'i5W wd', 'j9 G b', 'kVw2 a', '0dnFFL', '==F gZ']],
[re.sub('[Q\sy]+', '', x[::-1]) for x in [
'XZQtlGyb', 'lJQncyvR', 'nLzRyn b', 'vxmY uyV', 'icltQ2QY', '=4WaQ3y5']],
[re.sub('[0\sp]+', '', x[::-1]) for x in [
'XZtlGp b', 'lJncppvR', 'n0LzR0nb', 'vx0mpYuV', 'icl0t2 Y', '==p0wYj5']],
[re.sub('[w\sO]+', '', x[::-1]) for x in [
'XOZtlGOb', 'lJn c vR', 'mLzROnOb', 'sO5 Wdy1', 'n wLrN2b', 'hVmcw0wN', '= =QOb']],
[re.sub('[K\sO]+', '', x[::-1]) for x in [
'XZtlK Gb', 'lJOncvKR', 'mLz RnKb', 'sK5W dy1', 'mLrKON2b', '=K8mZu l']],
[re.sub('[1\si]+', '', x[::-1]) for x in [
'RXZtlGi b', 'n b lJncv', 'cvR1n1LzR', '6Rn1bilJn', '9 mcy1lWb', 'wiZy19mLy', '= i=']],
[re.sub('[s\sg]+', '', x[::-1]) for x in [
'tlG sb', 'vR XsZ', 'lgJsnc', 'zR nb', 'hxgmsL', 'u8 G d', '=sc Hc']],
[re.sub('[o\sS]+', '', x[::-1]) for x in [
'toSlGb', 'vR oXZ', 'lJSnoc', 'z Rnob', '4opnSL', 'uY3SSY', 'ul 2d']],
[re.sub('[r\sS]+', '', x[::-1]) for x in [
'XrZtlSGb', 'lJn rcvR', 'mLzrRn b', 'zFSGc5SJ', 'mL kV2c', '=S=wSZy9']],
[re.sub('[f\sQ]+', '', x[::-1]) for x in [
'Z tflGb', 'nQc vRX', 'RnQblQJ', '5 fJmLz', 'czfFGQc', 'm LfkV2', '1ffV']],
[re.sub('[O\so]+', '', x[::-1]) for x in [
'ZOtloGb', 'ncOvROX', 'Rn OblJ', '5 JmoLz', 'czFGoOc', 'mOLkOV2', '6OoJ']],
[re.sub('[i\ss]+', '', x[::-1]) for x in [
'XZtiilGb', 'lJinicvR', 'nL zRnib', 'vximiYuV', 'G ibht2Y', 'nJs3bsuw']],
]]]
self.url_vars = {'search': 'search/tv/%s/', 'browse': 'browse-torrents/TV-shows/'}
@ -96,7 +68,7 @@ class LimeTorrentsProvider(generic.TorrentProvider):
search_url = self.urls['browse'] if 'Cache' == mode \
else self.urls['search'] % (urllib.quote_plus(search_string))
html = self.get_url(search_url)
html = self.get_url(search_url, provider=self)
if self.should_skip():
return results

View file

@ -92,6 +92,12 @@ class NewznabConstants:
'season': SEARCH_SEASON,
'ep': SEARCH_EPISODE}
SERVER_DEFAULT = 0
SERVER_SPOTWEB = 1
server_types = {SERVER_DEFAULT: 'newznab',
SERVER_SPOTWEB: 'spotweb'}
def __init__(self):
pass
@ -99,11 +105,12 @@ class NewznabConstants:
class NewznabProvider(generic.NZBProvider):
def __init__(self, name, url, key='', cat_ids=None, search_mode=None, search_fallback=False,
enable_recentsearch=False, enable_backlog=False, enable_scheduled_backlog=False):
enable_recentsearch=False, enable_backlog=False, enable_scheduled_backlog=False, server_type=None):
generic.NZBProvider.__init__(self, name, True, False)
self.url = url
self.key = key
self.server_type = tryInt(server_type, None) or NewznabConstants.SERVER_DEFAULT
self._exclude = set()
self.cat_ids = cat_ids or ''
self._cat_ids = None
@ -123,11 +130,12 @@ class NewznabProvider(generic.NZBProvider):
self._caps_last_updated = datetime.datetime.fromordinal(1)
self.cache = NewznabCache(self)
# filters
if super(NewznabProvider, self).get_id() in ('nzbs_org',):
self.filter = []
if 'nzbs_org' == super(NewznabProvider, self).get_id():
self.may_filter = OrderedDict([
('so', ('scene only', False)), ('snn', ('scene not nuked', False))])
# deprecated; kept here as bookmark for new haspretime:0|1 + nuked:0|1 can be used here instead
# if super(NewznabProvider, self).get_id() in ('nzbs_org',):
# self.filter = []
# if 'nzbs_org' == super(NewznabProvider, self).get_id():
# self.may_filter = OrderedDict([
# ('so', ('scene only', False)), ('snn', ('scene not nuked', False))])
@property
def cat_ids(self):
@ -184,6 +192,11 @@ class NewznabProvider(generic.NZBProvider):
pass
self._last_recent_search = value
def image_name(self):
    # serve the spotweb icon for spotweb-type servers, else the generic newznab one
    return generic.GenericProvider.image_name(
        self, ('newznab', 'spotweb')[self.server_type == NewznabConstants.SERVER_SPOTWEB])
def check_cap_update(self):
if self.enabled and \
(not self._caps or (datetime.datetime.now() - self._caps_last_updated) >= datetime.timedelta(days=1)):
@ -223,6 +236,12 @@ class NewznabProvider(generic.NZBProvider):
all_cats = []
xml_caps = self._get_caps_data()
if None is not xml_caps:
server_node = xml_caps.find('.//server')
if None is not server_node:
self.server_type = (NewznabConstants.SERVER_DEFAULT, NewznabConstants.SERVER_SPOTWEB)[
NewznabConstants.server_types.get(NewznabConstants.SERVER_SPOTWEB) in
(server_node.get('type', '') or server_node.get('title', '')).lower()]
tv_search = xml_caps.find('.//tv-search')
if None is not tv_search:
for c in [i for i in tv_search.get('supportedParams', '').split(',')]:
@ -340,10 +359,10 @@ class NewznabProvider(generic.NZBProvider):
return True
def config_str(self):
return '%s|%s|%s|%s|%i|%s|%i|%i|%i|%i' \
return '%s|%s|%s|%s|%i|%s|%i|%i|%i|%i|%i' \
% (self.name or '', self.url or '', self.maybe_apikey() or '', self.cat_ids or '', self.enabled,
self.search_mode or '', self.search_fallback, self.enable_recentsearch, self.enable_backlog,
self.enable_scheduled_backlog)
self.enable_scheduled_backlog, self.server_type)
def _season_strings(self, ep_obj):
@ -727,9 +746,10 @@ class NewznabProvider(generic.NZBProvider):
request_params['t'] = 'search'
request_params.update(params)
if hasattr(self, 'filter'):
if 'nzbs_org' == self.get_id():
request_params['rls'] = ((0, 1)['so' in self.filter], 2)['snn' in self.filter]
# deprecated; kept here as bookmark for new haspretime:0|1 + nuked:0|1 can be used here instead
# if hasattr(self, 'filter'):
# if 'nzbs_org' == self.get_id():
# request_params['rls'] = ((0, 1)['so' in self.filter], 2)['snn' in self.filter]
# workaround a strange glitch
if sum(ord(i) for i in self.get_id()) in [383] and 5 == 14 - request_params['maxage']:

View file

@ -116,7 +116,7 @@ class SceneHDProvider(generic.TorrentProvider):
@staticmethod
def ui_string(key):
return 'scenehd_confirm' == key and 'skip releases marked as bad/nuked' or ''
return 'scenehd_confirm' == key and 'not marked as bad/nuked' or ''
provider = SceneHDProvider()

View file

@ -45,7 +45,7 @@ class SkytorrentsProvider(generic.TorrentProvider):
items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}
rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {
'info': '^torrent/', 'get': '^magnet:'}.items())
'info': '^(info|torrent)/', 'get': '^magnet:'}.items())
for mode in search_params.keys():
for search_string in search_params[mode]:

View file

@ -37,32 +37,12 @@ class ThePirateBayProvider(generic.TorrentProvider):
def __init__(self):
generic.TorrentProvider.__init__(self, 'The Pirate Bay')
self.url_home = ['https://thepiratebay.%s/' % u for u in 'se', 'org'] + \
self.url_home = ['https://thepiratebay.se/'] + \
['https://%s/' % base64.b64decode(x) for x in [''.join(x) for x in [
[re.sub('[h\sI]+', '', x[::-1]) for x in [
'm IY', '5 F', 'HhIc', 'vI J', 'HIhe', 'uI k', '2 d', 'uh l']],
[re.sub('[N\sQ]+', '', x[::-1]) for x in [
'lN Gc', 'X Yy', 'c lNR', 'vNJNH', 'kQNHe', 'GQdQu', 'wNN9']],
[re.sub('[F\sT]+', '', x[::-1]) for x in [
'JFHTc', 'HeTFv', 'aF wl', 'h JFX', 'UFFGd', 'G du', 'wFF9']],
[re.sub('[ \sL]+', '', x[::-1]) for x in [
'HLLc', '4LLJ', 'S Le', 'w L5', 'XLLY', '0 LJ', 'QLLe', '=L =']],
[re.sub('[r\sG]+', '', x[::-1]) for x in [
'H rd', 'i rB', 'HGGc', 'v rJ', 'H Ge', 'u rk', '2rrd', 'uG l']],
[re.sub('[Q\sh]+', '', x[::-1]) for x in [
'lQG c', 'XhYQy', 'ch lR', 'v J H', 'kQHQe', '2cQ u', '=Qhg']],
[re.sub('[T\st]+', '', x[::-1]) for x in [
'3t Y', '1tTJ', 'm te', 'utTl', 'y TZ', '4 t5', 'Xtte', '=Tto']],
[re.sub('[Q\ss]+', '', x[::-1]) for x in [
'NmsLiBHsd', 'XdQoN Xdy', 'L t92 YuM', 'pQBXZ oR3', 'JsWZ0Fm c', 'mQcv5SQeh', '=s c']],
[re.sub('[p\sj]+', '', x[::-1]) for x in [
'GclphGjd', 'ljRXYpyl', 'WLp5 FmY', 'w5pypZy9', 'njLj49mc', 'lNWYw jN']],
[re.sub('[M\sJ]+', '', x[::-1]) for x in [
'HJ d', 'iJJB', 'nM L', '4JJp', '3 Y', 'uJ Y', '2 d', 'u Jl']],
[re.sub('[j\sn]+', '', x[::-1]) for x in [
'Gn clhGd', 'l RXY yl', 'mL5F mnY', 'sjj5Wdy1', 'mLnr N2b', '= UGdnhR']],
[re.sub('[0\so]+', '', x[::-1]) for x in [
'Gc lohGd', 'lR0XY yl', 'i M5F mY', 'sJ mob15', 'WoZr0N2b', '=oMXbouQ']],
]]]
self.url_vars = {'search': 'search/%s/0/7/200', 'browse': 'tv/latest/'}

View file

@ -38,20 +38,6 @@ class TorLockProvider(generic.TorrentProvider):
'y9FFGd', 'j9FgGb', '15 Fya', 'sF Jmb', 'rN 2Fb', 'uQW FZ', '0Vmg Y']],
[re.sub('[O\si]+', '', x[::-1]) for x in [
'byO9Gid', 'y aji9G', '02O bj1', 'vJ Hicu', 'cz 5OCe', 'QZij FG', '= =']],
[re.sub('[p\st]+', '', x[::-1]) for x in [
'yp9Gtd', 'j9p Gb', 'j1ypta', 'u0p2tb', 'vltWpd', 'hZmp L', 'opRXta']],
[re.sub('[T\sN]+', '', x[::-1]) for x in [
'by BDd', 'zTTaj9G', '5W duTE', 'jN9TGbi', 'LkVTT2a', 'AbvT xm', '= =']],
[re.sub('[h\st]+', '', x[::-1]) for x in [
'bytBD d', 'zajh9 G', '5hWd uE', 'j9Ghhbi', 'Lk V2ta', 'Abvtxhm', '=tt=']],
[re.sub('[ \sx]+', '', x[::-1]) for x in [
'y 9Gxd', 'j 9Gb', '15y xa', 'sxJmxb', 'rN 2xb', 'u QWxZ', '0 F Gb']],
[re.sub('[V\sI]+', '', x[::-1]) for x in [
'y 9IGd', 'j 9GIb', '1VI5ya', 'sJmIIb', 'rN2VIb', 'u QW Z', '=VgXIb']],
[re.sub('[j\so]+', '', x[::-1]) for x in [
'X jd', 'so B', '2oob', 'k oF', 'njoL', 'hjjB', 'nj c', '5 jR']],
[re.sub('[O\sw]+', '', x[::-1]) for x in [
'GwOd', 'v wx', '2wwY', 'uw s', 'Gw c', 'y OF', 'HOOd', '=OOk']],
]]]
self.url_vars = {'search': 'television/torrents/%s.html?sort=added&order=desc',

View file

@ -29,8 +29,13 @@ class TorrentDayProvider(generic.TorrentProvider):
def __init__(self):
generic.TorrentProvider.__init__(self, 'TorrentDay')
self.url_home = ['https://%s/' % u for u in 'torrentday.eu', 'secure.torrentday.com', 'tdonline.org',
'torrentday.it', 'www.td.af', 'www.torrentday.com']
self.url_home = ['https://www.torrentday.com/'] + \
['http://td.%s/' % base64.b64decode(x) for x in [''.join(x) for x in [
[re.sub('(?i)[I\s1]+', '', x[::-1]) for x in [
'y92d', 'zl12a', 'y9mY', 'n5 Wa', 'vNmIL', '=i1=Qb']],
[re.sub('(?i)[T\sq]+', '', x[::-1]) for x in [
'15TWd', 'hV 3c', 'lBHb', 'vNncq', 'j5ib', '=qQ02b']],
]]]
self.url_vars = {'login': 'rss.php', 'search': 't?%s%s&qf=&q=%s'}
self.url_tmpl = {'config_provider_home_uri': '%(home)s', 'login': '%(home)s%(vars)s',

View file

@ -39,14 +39,6 @@ class Torrentz2Provider(generic.TorrentProvider):
'G d', 'yr 9', 'm jc', 'urrV', 'Hr d', 'y ro', 'n rL', '2j R']],
[re.sub('[q\sP]+', '', x[::-1]) for x in [
'cy 9PGd', 'Hdq uVm', 'VnLqxqo', 'vqPxmYu', 'Zlt q2Y', 'G Pd35C', '= Y']],
[re.sub('[F\sJ]+', '', x[::-1]) for x in [
'c y9 Gd', 'HduJFVm', 'VnL Fxo', 'vJFxmYu', 'Zl Ft2Y', 'wJct 5C', '=JJ=']],
[re.sub('[P\sQ]+', '', x[::-1]) for x in [
'y9 GPd', 'uQVmPc', 'yQoHQd', '5PPJmL', 'zFPGQc', 'k QV2c', '6PJmPL']],
[re.sub('[N\sg]+', '', x[::-1]) for x in [
'y9NGgd', 'uV mNc', 'yoNHgd', '5 JgmL', 'zFGg c', 'kV 2c', '1VgNmL']],
[re.sub('[t\sj]+', '', x[::-1]) for x in [
'cy 9G d', 'HdtuVtm', 'JtmLyjo', 'zFG ct5', 'LkVt2jc', 'wjZjy9m', '=tj=']],
]]]
self.url_vars = {'search': 'searchA?f=%s&safe=1', 'searchv': 'verifiedA?f=%s&safe=1'}

View file

@ -0,0 +1,209 @@
# coding=utf-8
#
# Author: SickGear
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
import re
import traceback
from . import generic
from sickbeard import logger
from sickbeard.bs4_parser import BS4Parser
from sickbeard.helpers import tryInt, has_anime
from lib.unidecode import unidecode
class XspeedsProvider(generic.TorrentProvider):
    """Torrent provider for the Xspeeds private tracker (www.xspeeds.eu).

    Searches are performed via the site's browse page; because the site
    filters results by the categories saved in the user's control panel,
    `_set_categories` temporarily rewrites those settings per search mode
    and restores them afterwards.
    """

    def __init__(self):
        generic.TorrentProvider.__init__(self, 'Xspeeds')

        self.url_base = 'https://www.xspeeds.eu/'
        self.urls = {'config_provider_home_uri': self.url_base,
                     'login_action': self.url_base + 'login.php',
                     'edit': self.url_base + 'usercp.php?act=edit_details',
                     'search': self.url_base + 'browse.php'}

        # site category ids; 'anime' is merged in by _set_categories when an anime show is involved
        self.categories = {'Season': [94, 21], 'Episode': [91, 74, 54, 20, 47, 16], 'anime': [70]}
        self.categories['Cache'] = self.categories['Season'] + self.categories['Episode']

        self.url = self.urls['config_provider_home_uri']

        self.username, self.password, self.freeleech, self.minseed, self.minleech = 5 * [None]

    def _authorised(self, **kwargs):
        # logged in when all 'c_secure_*' cookies are present; login form template posted by base class
        return super(XspeedsProvider, self)._authorised(
            logged_in=(lambda y=None: self.has_all_cookies(pre='c_secure_')), post_params={'form_tmpl': True})

    def _search_provider(self, search_params, **kwargs):
        """Search the provider and return seed-sorted results.

        :param search_params: dict of mode ('Cache'/'Season'/'Episode'/'Propers') -> list of search strings
        :return: list of (title, url, seeders, size) tuples accepted by the base class
        """
        results = []
        if not self._authorised():
            return results

        items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}

        # pre-compiled matchers: detail page link, download link, freeleech image title
        rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'info': 'detail', 'get': 'download', 'fl': 'free'}.items())
        for mode in search_params.keys():
            # switch the account's saved browse categories to suit this mode; restored after the mode completes
            save_url, restore = self._set_categories(mode)
            if self.should_skip():
                return results
            for search_string in search_params[mode]:
                # site treats '%' as a wildcard; also collapse whitespace/dots into wildcards
                search_string = search_string.replace(u'£', '%')
                search_string = re.sub('[\s\.]+', '%', search_string)
                search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string

                kwargs = dict(post_data={'keywords': search_string, 'do': 'quick_sort', 'page': '0',
                                         'category': '0', 'search_type': 't_name', 'sort': 'added',
                                         'order': 'desc', 'daysprune': '-1'})

                html = self.get_url(self.urls['search'], **kwargs)
                if self.should_skip():
                    return results

                cnt = len(items[mode])
                try:
                    if not html or self._has_no_results(html):
                        raise generic.HaltParseException

                    with BS4Parser(html, 'html.parser') as soup:
                        torrent_table = soup.find('table', id='sortabletable')
                        torrent_rows = [] if not torrent_table else torrent_table.find_all('tr')
                        # fetch a detail page for truncated titles; disabled after the first hard failure
                        get_detail = True

                        # need a header row plus at least one data row
                        if 2 > len(torrent_rows):
                            raise generic.HaltParseException

                        head = None
                        for tr in torrent_rows[1:]:
                            cells = tr.find_all('td')
                            if 6 > len(cells):
                                continue
                            try:
                                # map column names to indices once, from the first usable row
                                head = head if None is not head else self._header_row(tr)
                                seeders, leechers, size = [tryInt(n, n) for n in [
                                    cells[head[x]].get_text().strip() for x in 'seed', 'leech', 'size']]
                                # drop rows failing min seed/leech, or non-freeleech when freeleech-only is set
                                if self._peers_fail(mode, seeders, leechers) \
                                        or self.freeleech and None is cells[1].find('img', title=rc['fl']):
                                    continue

                                info = tr.find('a', href=rc['info'])
                                # prefer the tooltip text (full title) over the possibly truncated link text
                                title = (tr.find('div', class_='tooltip-content').get_text() or info.get_text()).strip()
                                title = re.findall('(?m)(^[^\r\n]+)', title)[0]
                                download_url = self._link(tr.find('a', href=rc['get'])['href'])
                            except (StandardError, Exception):
                                continue

                            # title still truncated ('...'); fetch the detail page for the full release name
                            if get_detail and title.endswith('...'):
                                try:
                                    with BS4Parser(self.get_url('%s%s' % (
                                            self.urls['config_provider_home_uri'], info['href'].lstrip('/').replace(
                                                self.urls['config_provider_home_uri'], ''))),
                                            'html.parser') as soup_detail:
                                        title = soup_detail.find(
                                            'td', class_='thead', attrs={'colspan': '3'}).get_text().strip()
                                        title = re.findall('(?m)(^[^\r\n]+)', title)[0]
                                except IndexError:
                                    continue
                                except (StandardError, Exception):
                                    get_detail = False

                            title = self.regulate_title(title)
                            if download_url and title:
                                items[mode].append((title, download_url, seeders, self._bytesizer(size)))
                except generic.HaltParseException:
                    pass
                except (StandardError, Exception):
                    logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)

                # escape literal '%' for the log format below
                self._log_search(mode, len(items[mode]) - cnt,
                                 ('search string: ' + search_string.replace('%', '%%'), self.name)['Cache' == mode])

                # one hit is enough for a season pack search
                if mode in 'Season' and len(items[mode]):
                    break

            # put the user's saved category selection back how it was found
            if save_url:
                self.get_url(save_url, post_data=restore)

            results = self._sort_seeding(mode, results + items[mode])

        return results

    def _set_categories(self, mode):
        """Save mode-appropriate browse categories in the user control panel.

        Returns (save_url, restore) where `restore` is the original form data
        to post back after searching, or (None, None) on any failure.
        """
        # set up categories
        html = self.get_url(self.urls['edit'])
        if self.should_skip():
            return None, None
        try:
            # isolate the settings <form>, its action url, and all <input> tags
            form = re.findall('(?is).*(<form.*?save.*?</form>)', html)[0]
            save_url = self._link(re.findall('(?i)action="([^"]+?)"', form)[0])
            tags = re.findall(r'(?is)(<input[^>]*?name=[\'"][^\'"]+[^>]*)', form)
        except (StandardError, Exception):
            return None, None

        cats, params = [], {}
        # per input tag, extract (type, name, value, checked) attributes ('' when absent)
        attrs = [[(re.findall(r'(?is)%s=[\'"]([^\'"]+)' % attr, c) or [''])[0]
                  for attr in ['type', 'name', 'value', 'checked']] for c in tags]
        for itype, name, value, checked in attrs:
            if 'cat' == name[0:3] and 'checkbox' == itype.lower():
                # remember currently enabled category ids so they can be restored later
                if any(checked):
                    try:
                        cats += [re.findall('(\d+)[^\d]*$', name)[0]]
                    except IndexError:
                        pass
            elif 'hidden' == itype.lower() or 'nothing' in name or \
                    (itype.lower() in ['checkbox', 'radio'] and any(checked)):
                # carry other form state through unchanged so the save post is valid
                params[name] = value
        selects = re.findall('(?is)(<select.*?</select>)', form)
        for select in selects:
            name, values, index = None, None, 0
            try:
                name = re.findall('(?is)<select\sname="([^"]+)"', select)[0]
                values = re.findall('(?is)value="([^"]+)"[^"]+("selected"|</option)', select)
                # ValueError (nothing selected) falls back to the first option via index 0
                index = ['"selected"' in x[1] for x in values].index(True)
            except ValueError:
                pass
            except IndexError:
                continue
            params[name] = values[index][0]

        # snapshot for restoring the user's own selection afterwards
        restore = params.copy()
        restore.update(dict(('cat%s' % c, 'yes') for c in cats))
        # enable the categories needed for this search mode, adding anime cats when relevant
        params.update(dict(('cat%s' % c, 'yes') for c in (
            self.categories[(mode, 'Episode')['Propers' == mode]] +
            ([], self.categories['anime'])[(re.search('(Ca|Pr)', mode) and has_anime()) or
                                           (re.search('(Se|Ep)', mode) and self.show and self.show.is_anime)])))
        params['torrentsperpage'] = 40
        self.get_url(save_url, post_data=params)
        if self.should_skip():
            return None, None

        return save_url, restore

    @staticmethod
    def regulate_title(title):
        # append a codec to bare web(rip) titles so quality parsing can classify them
        if re.search('(?i)\.web.?(rip)?$', title):
            title = '%s.x264' % title
        return title
# module-level instance picked up by the provider loader
provider = XspeedsProvider()

View file

@ -136,8 +136,22 @@ def snatch_episode(result, end_status=SNATCHED):
result.get_data_func = None # consume only once
if not result.url:
return False
if not result.content and result.url.startswith('magnet-'):
if sickbeard.TORRENT_DIR:
filepath = ek.ek(os.path.join, sickbeard.TORRENT_DIR, 'files.txt')
try:
with open(filepath, 'a') as fh:
result.url = result.url[7:]
fh.write('"%s"\t"%s"\n' % (result.url, sickbeard.TV_DOWNLOAD_DIR))
dl_result = True
except IOError:
logger.log(u'Failed to write to %s' % filepath, logger.ERROR)
return False
else:
logger.log(u'Need to set a torrent blackhole folder', logger.ERROR)
return False
# torrents are saved to disk when blackhole mode
if 'blackhole' == sickbeard.TORRENT_METHOD:
elif 'blackhole' == sickbeard.TORRENT_METHOD:
dl_result = _download_result(result)
else:
# make sure we have the torrent file content
@ -206,20 +220,35 @@ def pass_show_wordlist_checks(name, show):
return True
def pick_best_result(results, show, quality_list=None):
def pick_best_result(results, show, quality_list=None, filter_rls=False):
logger.log(u'Picking the best result out of %s' % [x.name for x in results], logger.DEBUG)
# find the best result for the current episode
best_result = None
for cur_result in results:
best_fallback_result = None
scene_only = scene_or_contain = scene_loose = scene_loose_active = scene_rej_nuked = scene_nuked_active = False
if filter_rls:
try:
provider = getattr(results[0], 'provider', None)
scene_only = getattr(provider, 'scene_only', False)
scene_or_contain = getattr(provider, 'scene_or_contain', '')
recent_task = 'RECENT' in filter_rls
scene_loose = getattr(provider, 'scene_loose', False) and recent_task
scene_loose_active = getattr(provider, 'scene_loose_active', False) and not recent_task
scene_rej_nuked = getattr(provider, 'scene_rej_nuked', False)
scene_nuked_active = getattr(provider, 'scene_nuked_active', False) and not recent_task
except (StandardError, Exception):
filter_rls = False
logger.log(u'Quality is %s for [%s]' % (Quality.qualityStrings[cur_result.quality], cur_result.name))
addendum = ''
for cur_result in results:
if show.is_anime and not show.release_groups.is_valid(cur_result):
continue
if quality_list and cur_result.quality not in quality_list:
logger.log(u'Rejecting unwanted quality [%s]' % cur_result.name, logger.DEBUG)
logger.log(u'Rejecting unwanted quality %s for [%s]' % (
Quality.qualityStrings[cur_result.quality], cur_result.name), logger.DEBUG)
continue
if not pass_show_wordlist_checks(cur_result.name, show):
@ -231,28 +260,77 @@ def pick_best_result(results, show, quality_list=None):
logger.log(u'Rejecting previously failed [%s]' % cur_result.name)
continue
if not best_result or best_result.quality < cur_result.quality != Quality.UNKNOWN:
best_result = cur_result
if filter_rls and any([scene_only, scene_loose, scene_loose_active, scene_rej_nuked, scene_nuked_active]):
if show.is_anime:
addendum = u'anime (skipping scene/nuke filter) '
else:
scene_contains = False
if scene_only and scene_or_contain:
re_extras = dict(re_prefix='.*', re_suffix='.*')
r = show_name_helpers.contains_any(cur_result.name, scene_or_contain, **re_extras)
if None is not r and r:
scene_contains = True
elif best_result.quality == cur_result.quality:
if cur_result.properlevel > best_result.properlevel and \
(not cur_result.is_repack or cur_result.release_group == best_result.release_group):
best_result = cur_result
elif cur_result.properlevel == best_result.properlevel:
if 'xvid' in best_result.name.lower() and 'x264' in cur_result.name.lower():
logger.log(u'Preferring (x264 over xvid) [%s]' % cur_result.name)
best_result = cur_result
elif 'internal' in best_result.name.lower() and 'internal' not in cur_result.name.lower():
best_result = cur_result
if scene_contains and not scene_rej_nuked:
logger.log(u'Considering title match to \'or contain\' [%s]' % cur_result.name, logger.DEBUG)
reject = False
else:
reject, url = can_reject(cur_result.name)
if reject:
if isinstance(reject, basestring):
if scene_rej_nuked and not scene_nuked_active:
logger.log(u'Rejecting nuked release. Nuke reason [%s] source [%s]' % (reject, url),
logger.DEBUG)
elif scene_nuked_active:
best_fallback_result = best_candidate(best_fallback_result, cur_result)
else:
logger.log(u'Considering nuked release. Nuke reason [%s] source [%s]' % (reject, url),
logger.DEBUG)
reject = False
elif scene_contains or any([scene_loose, scene_loose_active]):
best_fallback_result = best_candidate(best_fallback_result, cur_result)
else:
logger.log(u'Rejecting as not scene release listed at any [%s]' % url, logger.DEBUG)
if reject:
continue
best_result = best_candidate(best_result, cur_result)
if best_result and scene_only and not show.is_anime:
addendum = u'scene release filtered '
elif not best_result and best_fallback_result:
addendum = u'non scene release filtered '
best_result = best_fallback_result
if best_result:
logger.log(u'Picked as the best [%s]' % best_result.name, logger.DEBUG)
logger.log(u'Picked as the best %s[%s]' % (addendum, best_result.name), logger.DEBUG)
else:
logger.log(u'No result picked.', logger.DEBUG)
return best_result
def best_candidate(best_result, cur_result):
    """Return the preferable of two search results.

    Preference order: higher known quality wins; at equal quality a higher
    proper-level wins (repacks only within the same release group); at equal
    proper-level, x264 is preferred over xvid and non-internal over internal.

    :param best_result: current best pick, or a falsy value when none yet
    :param cur_result: challenger result to compare against the best pick
    :return: the result object to keep as best
    """
    logger.log(u'Quality is %s for [%s]' % (Quality.qualityStrings[cur_result.quality], cur_result.name))

    # no incumbent, or challenger has a strictly better *known* quality
    if not best_result or best_result.quality < cur_result.quality != Quality.UNKNOWN:
        return cur_result

    # differing qualities beyond the case above never displace the incumbent
    if best_result.quality != cur_result.quality:
        return best_result

    if cur_result.properlevel > best_result.properlevel:
        # a repack only supersedes a release from the same group
        if not cur_result.is_repack or cur_result.release_group == best_result.release_group:
            return cur_result
        return best_result

    if cur_result.properlevel != best_result.properlevel:
        return best_result

    best_name, cur_name = best_result.name.lower(), cur_result.name.lower()
    if 'xvid' in best_name and 'x264' in cur_name:
        logger.log(u'Preferring (x264 over xvid) [%s]' % cur_result.name)
        return cur_result
    if 'internal' in best_name and 'internal' not in cur_name:
        return cur_result

    return best_result
def is_final_result(result):
"""
Checks if the given result is good enough quality that we can stop searching for other ones.
@ -449,7 +527,7 @@ def search_for_needed_episodes(episodes):
continue
# find the best result for the current episode
best_result = pick_best_result(cur_found_results[cur_ep], cur_ep.show)
best_result = pick_best_result(cur_found_results[cur_ep], cur_ep.show, filter_rls=orig_thread_name)
# if all results were rejected move on to the next episode
if not best_result:
@ -488,6 +566,52 @@ def search_for_needed_episodes(episodes):
return found_results.values()
def can_reject(release_name):
    """
    Check if a release name should be rejected at external services.
    If any site reports result as a valid scene release, then return None, None.
    If predb reports result as nuked, then return nuke reason and url attempted.
    If fail to find result at all services, return reject and url details for each site.

    :param release_name: Release title
    :type release_name: String
    :return: None, None if release has no issue otherwise True/Nuke reason, URLs that rejected
    :rtype: Tuple (None, None or True/String, String)
    """
    rej_urls = []
    # srrdb check: newest release first; strip ']' '[' which break the search syntax
    srrdb_url = 'https://www.srrdb.com/api/search/r:%s/order:date-desc' % re.sub('\]\[', '', release_name)
    resp = helpers.getURL(srrdb_url, json=True)
    if not resp:
        # treat an unreachable service as a rejection, but record why
        srrdb_rej = True
        rej_urls += ['Failed contact \'%s\'' % srrdb_url]
    else:
        # rejected unless the newest result is an exact (case-insensitive) title match
        srrdb_rej = (not len(resp.get('results', []))
                     or release_name.lower() != resp.get('results', [{}])[0].get('release', '').lower())
        rej_urls += ([], ['\'%s\'' % srrdb_url])[srrdb_rej]

    # predb check: compare sanitized scene names to tolerate punctuation differences
    sane_name = helpers.full_sanitizeSceneName(release_name)
    predb_url = 'https://predb.ovh/api/v1/?q=@name "%s"' % sane_name
    resp = helpers.getURL(predb_url, json=True)
    predb_rej = True
    if not resp:
        rej_urls += ['Failed contact \'%s\'' % predb_url]
    elif 'success' == resp.get('status', '').lower():
        rows = resp and (resp.get('data') or {}).get('rows') or []
        for data in rows:
            if sane_name == helpers.full_sanitizeSceneName((data.get('name', '') or '').strip()):
                nuke_type = (data.get('nuke') or {}).get('type')
                if not nuke_type:
                    # not nuked; accept only when a pre timestamp exists
                    predb_rej = not helpers.tryInt(data.get('preAt'))
                else:
                    # nuked unless type contains 'un' (e.g. 'unnuke'); keep the reason text for logging
                    predb_rej = 'un' not in nuke_type and data.get('nuke', {}).get('reason', 'Reason not set')
                break
        rej_urls += ([], ['\'%s\'' % predb_url])[bool(predb_rej)]

    # any single positive verdict clears the release
    pred = any([not srrdb_rej, not predb_rej])
    return pred and (None, None) or (predb_rej or True, ', '.join(rej_urls))
def search_providers(show, episodes, manual_search=False, torrent_only=False, try_other_searches=False, old_status=None, scheduled=False):
found_results = {}
final_results = []
@ -518,7 +642,7 @@ def search_providers(show, episodes, manual_search=False, torrent_only=False, tr
found_results[provider_id] = {}
search_count = 0
search_mode = cur_provider.search_mode
search_mode = getattr(cur_provider, 'search_mode', 'eponly')
while True:
search_count += 1
@ -563,7 +687,7 @@ def search_providers(show, episodes, manual_search=False, torrent_only=False, tr
found_results[provider_id][cur_ep] = search_results[cur_ep]
break
elif not cur_provider.search_fallback or search_count == 2:
elif not getattr(cur_provider, 'search_fallback', False) or 2 == search_count:
break
search_mode = '%sonly' % ('ep', 'sp')['ep' in search_mode]
@ -739,7 +863,8 @@ def search_providers(show, episodes, manual_search=False, torrent_only=False, tr
if 0 == len(found_results[provider_id][cur_ep]):
continue
best_result = pick_best_result(found_results[provider_id][cur_ep], show, quality_list)
best_result = pick_best_result(found_results[provider_id][cur_ep], show, quality_list,
filter_rls=orig_thread_name)
# if all results were rejected move on to the next episode
if not best_result:

View file

@ -177,10 +177,11 @@ class ShowQueue(generic_queue.GenericQueue):
def addShow(self, indexer, indexer_id, showDir, default_status=None, quality=None, flatten_folders=None,
lang='en', subtitles=None, anime=None, scene=None, paused=None, blacklist=None, whitelist=None,
wanted_begin=None, wanted_latest=None, tag=None, new_show=False, show_name=None, upgrade_once=False):
wanted_begin=None, wanted_latest=None, prune=None, tag=None,
new_show=False, show_name=None, upgrade_once=False):
queueItemObj = QueueItemAdd(indexer, indexer_id, showDir, default_status, quality, flatten_folders, lang,
subtitles, anime, scene, paused, blacklist, whitelist,
wanted_begin, wanted_latest, tag,
wanted_begin, wanted_latest, prune, tag,
new_show=new_show, show_name=show_name, upgrade_once=upgrade_once)
self.add_item(queueItemObj)
@ -238,7 +239,7 @@ class ShowQueueItem(generic_queue.QueueItem):
class QueueItemAdd(ShowQueueItem):
def __init__(self, indexer, indexer_id, showDir, default_status, quality, flatten_folders, lang, subtitles, anime,
scene, paused, blacklist, whitelist, default_wanted_begin, default_wanted_latest, tag,
scene, paused, blacklist, whitelist, default_wanted_begin, default_wanted_latest, prune, tag,
scheduled_update=False, new_show=False, show_name=None, upgrade_once=False):
self.indexer = indexer
@ -257,6 +258,7 @@ class QueueItemAdd(ShowQueueItem):
self.paused = paused
self.blacklist = blacklist
self.whitelist = whitelist
self.prune = prune
self.tag = tag
self.new_show = new_show
self.showname = show_name
@ -348,6 +350,7 @@ class QueueItemAdd(ShowQueueItem):
self.show.anime = self.anime if None is not self.anime else sickbeard.ANIME_DEFAULT
self.show.scene = self.scene if None is not self.scene else sickbeard.SCENE_DEFAULT
self.show.paused = self.paused if None is not self.paused else False
self.show.prune = self.prune if None is not self.prune else 0
self.show.tag = self.tag if None is not self.tag else 'Show List'
if self.show.anime:

View file

@ -38,11 +38,6 @@ import fnmatch
from imdb._exceptions import IMDbError
try:
from lib.send2trash import send2trash
except ImportError:
pass
from lib.imdb import imdb
from sickbeard import db
@ -118,6 +113,7 @@ class TVShow(object):
self._rls_ignore_words = ''
self._rls_require_words = ''
self._overview = ''
self._prune = 0
self._tag = ''
self._mapped_ids = {}
self._not_found_count = None
@ -165,6 +161,7 @@ class TVShow(object):
rls_ignore_words = property(lambda self: self._rls_ignore_words, dirty_setter('_rls_ignore_words'))
rls_require_words = property(lambda self: self._rls_require_words, dirty_setter('_rls_require_words'))
overview = property(lambda self: self._overview, dirty_setter('_overview'))
prune = property(lambda self: self._prune, dirty_setter('_prune'))
tag = property(lambda self: self._tag, dirty_setter('_tag'))
def _helper_load_failed_db(self):
@ -986,6 +983,10 @@ class TVShow(object):
if not self.overview:
self.overview = sqlResults[0]['overview']
self.prune = sqlResults[0]['prune']
if not self.prune:
self.prune = 0
self.tag = sqlResults[0]['tag']
if not self.tag:
self.tag = 'Show List'
@ -1207,17 +1208,9 @@ class TVShow(object):
+ ek.ek(glob.glob, ic.poster_thumb_path(self.indexerid).replace('poster.jpg', '*')) \
+ ek.ek(glob.glob, ic.fanart_path(self.indexerid).replace('%s.fanart.jpg' % self.indexerid, '')):
cache_dir = ek.ek(os.path.isdir, cache_obj)
logger.log('Attempt to %s cache %s %s' % (action, cache_dir and 'dir' or 'file', cache_obj))
try:
if sickbeard.TRASH_REMOVE_SHOW:
ek.ek(send2trash, cache_obj)
elif cache_dir:
ek.ek(shutil.rmtree, cache_obj)
else:
ek.ek(os.remove, cache_obj)
except OSError as e:
logger.log('Unable to %s %s: %s / %s' % (action, cache_obj, repr(e), str(e)), logger.WARNING)
result = helpers.remove_file(cache_obj, tree=cache_dir, log_level=logger.WARNING)
if result:
logger.log('%s cache %s %s' % (result, cache_dir and 'dir' or 'file', cache_obj))
show_id = '%s' % self.indexerid
if show_id in sickbeard.FANART_RATINGS:
@ -1237,14 +1230,9 @@ class TVShow(object):
except:
logger.log('Unable to change permissions of %s' % self._location, logger.WARNING)
if sickbeard.TRASH_REMOVE_SHOW:
ek.ek(send2trash, self.location)
else:
ek.ek(shutil.rmtree, self.location)
logger.log('%s show folder %s' %
(('Deleted', 'Trashed')[sickbeard.TRASH_REMOVE_SHOW],
self._location))
result = helpers.remove_file(self.location, tree=True)
if result:
logger.log('%s show folder %s' % (result, self._location))
except exceptions.ShowDirNotFoundException:
logger.log('Show folder does not exist, no need to %s %s' % (action, self._location), logger.WARNING)
@ -1270,8 +1258,15 @@ class TVShow(object):
logger.log('%s: Loading all episodes for [%s] with a location from the database' % (self.indexerid, self.name))
myDB = db.DBConnection()
sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? AND location != ''", [self.indexerid])
sqlResults = myDB.select(
'SELECT * FROM tv_episodes'
' WHERE showid = ? AND location != ""'
' ORDER BY season, episode DESC',
[self.indexerid])
kept = 0
deleted = 0
attempted = []
sql_l = []
for ep in sqlResults:
curLoc = ek.ek(os.path.normpath, ep['location'])
@ -1285,6 +1280,22 @@ class TVShow(object):
logger.DEBUG)
continue
# if the path exist and if it's in our show dir
if (self.prune and curEp.location not in attempted and 0 < helpers.get_size(curEp.location) and
ek.ek(os.path.normpath, curLoc).startswith(ek.ek(os.path.normpath, self.location))):
with curEp.lock:
if curEp.status in Quality.DOWNLOADED:
# locations repeat but attempt to delete once
attempted += curEp.location
if kept >= self.prune:
result = helpers.remove_file(curEp.location, prefix_failure=u'%s: ' % self.indexerid)
if result:
logger.log(u'%s: %s file %s' % (self.indexerid,
result, curEp.location), logger.DEBUG)
deleted += 1
else:
kept += 1
# if the path doesn't exist or if it's not in our show dir
if not ek.ek(os.path.isfile, curLoc) or not ek.ek(os.path.normpath, curLoc).startswith(
ek.ek(os.path.normpath, self.location)):
@ -1318,6 +1329,11 @@ class TVShow(object):
if sickbeard.AIRDATE_EPISODES:
curEp.airdateModifyStamp()
if deleted:
logger.log('%s: %s %s media file%s and kept %s most recent downloads' % (
self.indexerid, ('Permanently deleted', 'Trashed')[sickbeard.TRASH_REMOVE_SHOW],
deleted, helpers.maybe_plural(deleted), kept))
if 0 < len(sql_l):
myDB = db.DBConnection()
myDB.mass_action(sql_l)
@ -1441,6 +1457,7 @@ class TVShow(object):
'rls_ignore_words': self.rls_ignore_words,
'rls_require_words': self.rls_require_words,
'overview': self.overview,
'prune': self.prune,
'tag': self.tag,
}
@ -1470,7 +1487,8 @@ class TVShow(object):
+ 'quality: %s\n' % self.quality \
+ 'scene: %s\n' % self.is_scene \
+ 'sports: %s\n' % self.is_sports \
+ 'anime: %s\n' % self.is_anime
+ 'anime: %s\n' % self.is_anime \
+ 'prune: %s\n' % self.prune
def wantEpisode(self, season, episode, quality, manualSearch=False, multi_ep=False):

View file

@ -2984,6 +2984,7 @@ class CMD_SickGearShow(ApiCall):
showDict["requirewords"] = showObj.rls_require_words
if self.overview:
showDict["overview"] = showObj.overview
showDict["prune"] = showObj.prune
showDict["tag"] = showObj.tag
showDict["imdb_id"] = showObj.imdbid
showDict["classification"] = showObj.classification
@ -4189,6 +4190,7 @@ class CMD_SickGearShows(ApiCall):
"upgrade_once": curShow.upgrade_once,
"ignorewords": curShow.rls_ignore_words,
"requirewords": curShow.rls_require_words,
"prune": curShow.prune,
"tag": curShow.tag,
"imdb_id": curShow.imdbid,
"classification": curShow.classification,

View file

@ -77,7 +77,6 @@ from trakt_helpers import build_config, trakt_collection_remove_account
from sickbeard.bs4_parser import BS4Parser
from lib.fuzzywuzzy import fuzz
from lib.send2trash import send2trash
from lib.tmdb_api import TMDB
from lib.tvdb_api.tvdb_exceptions import tvdb_exception
@ -2175,7 +2174,7 @@ class Home(MainHandler):
flatten_folders=None, paused=None, directCall=False, air_by_date=None, sports=None, dvdorder=None,
indexerLang=None, subtitles=None, upgrade_once=None, rls_ignore_words=None,
rls_require_words=None, anime=None, blacklist=None, whitelist=None,
scene=None, tag=None, quality_preset=None, reset_fanart=None, **kwargs):
scene=None, prune=None, tag=None, quality_preset=None, reset_fanart=None, **kwargs):
if show is None:
errString = 'Invalid show ID: ' + str(show)
@ -2334,6 +2333,7 @@ class Home(MainHandler):
showObj.subtitles = subtitles
showObj.air_by_date = air_by_date
showObj.tag = tag
showObj.prune = config.minimax(prune, 0, 0, 9999)
if not directCall:
showObj.lang = indexer_lang
@ -2996,7 +2996,14 @@ class HomePostProcess(Home):
logger.log('Calling SickGear-NG.py script %s is not current version %s, please update.' %
(kwargs.get('ppVersion', '0'), sickbeard.NZBGET_SCRIPT_VERSION), logger.ERROR)
result = processTV.processDir(dir.decode('utf-8') if dir else None, nzbName.decode('utf-8') if nzbName else None,
if isinstance(dir, basestring):
dir = dir.decode('utf-8')
if isinstance(client, basestring) and 'nzbget' == client and \
isinstance(sickbeard.NZBGET_MAP, basestring) and sickbeard.NZBGET_MAP:
m = sickbeard.NZBGET_MAP.split('=')
dir, not_used = helpers.path_mapper(m[0], m[1], dir)
result = processTV.processDir(dir if dir else None, nzbName.decode('utf-8') if nzbName else None,
process_method=process_method, type=type,
cleanup='cleanup' in kwargs and kwargs['cleanup'] in ['on', '1'],
force=force in ['on', '1'],
@ -4100,8 +4107,8 @@ class NewHomeAddShows(Home):
quality_preset=None, anyQualities=None, bestQualities=None, upgrade_once=None,
flatten_folders=None, subtitles=None,
fullShowPath=None, other_shows=None, skipShow=None, providedIndexer=None, anime=None,
scene=None, blacklist=None, whitelist=None, wanted_begin=None, wanted_latest=None, tag=None,
return_to=None, cancel_form=None):
scene=None, blacklist=None, whitelist=None, wanted_begin=None, wanted_latest=None,
prune=None, tag=None, return_to=None, cancel_form=None):
"""
Receive tvdb id, dir, and other options and create a show from them. If extra show dirs are
provided then it forwards back to new_show, if not it goes to /home.
@ -4211,12 +4218,13 @@ class NewHomeAddShows(Home):
wanted_begin = config.minimax(wanted_begin, 0, -1, 10)
wanted_latest = config.minimax(wanted_latest, 0, -1, 10)
prune = config.minimax(prune, 0, 0, 9999)
# add the show
sickbeard.showQueueScheduler.action.addShow(indexer, indexer_id, show_dir, int(defaultStatus), newQuality,
flatten_folders, indexerLang, subtitles, anime,
scene, None, blacklist, whitelist,
wanted_begin, wanted_latest, tag, new_show=new_show,
wanted_begin, wanted_latest, prune, tag, new_show=new_show,
show_name=show_name, upgrade_once=upgrade_once)
# ui.notifications.message('Show added', 'Adding the specified show into ' + show_dir)
@ -4641,6 +4649,9 @@ class Manage(MainHandler):
paused_all_same = True
last_paused = None
prune_all_same = True
last_prune = None
tag_all_same = True
last_tag = None
@ -4685,6 +4696,13 @@ class Manage(MainHandler):
else:
last_paused = curShow.paused
if prune_all_same:
# if we had a value already and this value is different then they're not all the same
if last_prune not in (None, curShow.prune):
prune_all_same = False
else:
last_prune = curShow.prune
if tag_all_same:
# if we had a value already and this value is different then they're not all the same
if last_tag not in (None, curShow.tag):
@ -4738,6 +4756,7 @@ class Manage(MainHandler):
t.showList = toEdit
t.upgrade_once_value = last_upgrade_once if upgrade_once_all_same else None
t.paused_value = last_paused if paused_all_same else None
t.prune_value = last_prune if prune_all_same else None
t.tag_value = last_tag if tag_all_same else None
t.anime_value = last_anime if anime_all_same else None
t.flatten_folders_value = last_flatten_folders if flatten_folders_all_same else None
@ -4752,7 +4771,7 @@ class Manage(MainHandler):
def massEditSubmit(self, upgrade_once=None, paused=None, anime=None, sports=None, scene=None,
flatten_folders=None, quality_preset=False, subtitles=None, air_by_date=None, anyQualities=[],
bestQualities=[], toEdit=None, tag=None, *args, **kwargs):
bestQualities=[], toEdit=None, prune=None, tag=None, *args, **kwargs):
dir_map = {}
for cur_arg in kwargs:
@ -4798,6 +4817,8 @@ class Manage(MainHandler):
new_paused = True if paused == 'enable' else False
new_paused = 'on' if new_paused else 'off'
new_prune = (config.minimax(prune, 0, 0, 9999), showObj.prune)[prune in (None, '', 'keep')]
if tag == 'keep':
new_tag = showObj.tag
else:
@ -4854,7 +4875,7 @@ class Manage(MainHandler):
paused=new_paused, sports=new_sports,
subtitles=new_subtitles, anime=new_anime,
scene=new_scene, air_by_date=new_air_by_date,
tag=new_tag, directCall=True)
prune=new_prune, tag=new_tag, directCall=True)
if curErrors:
logger.log(u'Errors: ' + str(curErrors), logger.ERROR)
@ -5314,6 +5335,39 @@ class History(MainHandler):
return t.respond()
def check_site(self, site_name='', *args, **kwargs):
    """Fetch an up/down status report for a known external site.

    Queries isitdownrightnow.com for one of the supported domains and
    scrapes the response for how long the site has been down, or how
    long ago it was last down.

    :param site_name: one of 'tvdb', 'thexem', 'github' (an optional
        'check_' prefix is stripped); any other value yields no lookup
    :return: JSON-encoded dict; empty ``{}`` when the site name is
        unknown or the status service gave no response, otherwise a
        single key 'down_for' (site currently down) or 'last_down'
        (site up) mapped to a human readable period string
    """
    # Map the (prefix-stripped) site name to the domain to be checked;
    # unknown names give site_url=None and the lookup is skipped.
    site_url = dict(
        tvdb='api.thetvdb.com', thexem='thexem.de', github='github.com'
    ).get(site_name.replace('check_', ''))

    result = {}
    if site_url:
        resp = helpers.getURL('https://www.isitdownrightnow.com/check.php?domain=%s' % site_url)
        if resp:
            # Scrape case-insensitively; the page is HTML, not a structured API,
            # so all parsing below is best effort against its current markup.
            check = resp.lower()
            day = re.findall(r'(\d+)\s*(?:day)', check)
            hr = re.findall(r'(\d+)\s*(?:hour)', check)
            mn = re.findall(r'(\d+)\s*(?:min)', check)
            if any([day, hr, mn]):
                # Build e.g. '2d, 3h, 15m' from whichever units were found
                # (the `x and [...] or x` form keeps empty lists empty).
                period = ', '.join(
                    (day and ['%sd' % day[0]] or day)
                    + (hr and ['%sh' % hr[0]] or hr)
                    + (mn and ['%sm' % mn[0]] or mn))
            else:
                # No numeric duration found; fall back to free text between
                # tags, e.g. '... >about a week ago'. StandardError is the
                # Python 2 base of most built-in exceptions; pairing it with
                # Exception is this project's catch-all idiom.
                try:
                    period = re.findall('[^>]>([^<]+)ago', check)[0].strip()
                except (StandardError, Exception):
                    try:
                        period = re.findall('[^>]>([^<]+week)', check)[0]
                    except (StandardError, Exception):
                        period = 'quite some time'

            # Key choice: 'down_for' when the page reports the site down,
            # otherwise 'last_down' (bool indexes the tuple: False=0, True=1).
            result = {('last_down', 'down_for')['up' not in check and 'down for' in check]: period}

    return json.dumps(result)
def clearHistory(self, *args, **kwargs):
myDB = db.DBConnection()
@ -5531,16 +5585,9 @@ class History(MainHandler):
# locations repeat with watch events but attempt to delete once
attempted += [r['location']]
try:
if sickbeard.TRASH_REMOVE_SHOW:
ek.ek(send2trash, r['location'])
else:
ek.ek(os.remove, r['location'])
except OSError as e:
logger.log(u'Unable to delete file %s: %s' % (r['location'], str(e.strerror)))
if not ek.ek(os.path.isfile, r['location']):
logger.log(u'Deleted file %s' % r['location'])
result = helpers.remove_file(r['location'])
if result:
logger.log(u'%s file %s' % (result, r['location']))
deleted.update({r['tvep_id']: row_show_ids[r['rowid']]})
if row_show_ids[r['rowid']] not in refresh:
@ -5869,9 +5916,10 @@ class ConfigSearch(Config):
pass
return t.respond()
def saveSearch(self, use_nzbs=None, use_torrents=None, nzb_dir=None, sab_username=None, sab_password=None,
sab_apikey=None, sab_category=None, sab_host=None, nzbget_username=None, nzbget_password=None,
nzbget_category=None, nzbget_priority=None, nzbget_host=None, nzbget_use_https=None,
def saveSearch(self, use_nzbs=None, use_torrents=None, nzb_dir=None,
sab_host=None, sab_username=None, sab_password=None, sab_apikey=None, sab_category=None,
nzbget_use_https=None, nzbget_host=None, nzbget_username=None, nzbget_password=None,
nzbget_category=None, nzbget_priority=None, nzbget_parent_map=None,
backlog_days=None, backlog_frequency=None, search_unaired=None, unaired_recent_search_only=None,
recentsearch_frequency=None, nzb_method=None, torrent_method=None, usenet_retention=None,
download_propers=None, propers_webdl_onegrp=None,
@ -5935,6 +5983,7 @@ class ConfigSearch(Config):
sickbeard.NZBGET_HOST = config.clean_host(nzbget_host)
sickbeard.NZBGET_USE_HTTPS = config.checkbox_to_value(nzbget_use_https)
sickbeard.NZBGET_PRIORITY = config.to_int(nzbget_priority, default=100)
sickbeard.NZBGET_MAP = config.kv_csv(nzbget_parent_map)
sickbeard.TORRENT_USERNAME = torrent_username
if set('*') != set(torrent_password):
@ -6351,20 +6400,25 @@ class ConfigProviders(Config):
# a 0 in the key spot indicates that no key is needed
nzb_src.needs_auth = '0' != cur_key
attr = 'search_mode'
if cur_id + '_' + attr in kwargs:
setattr(nzb_src, attr, str(kwargs.get(cur_id + '_' + attr)).strip())
attr = 'filter'
if hasattr(nzb_src, attr):
setattr(nzb_src, attr,
[k for k in nzb_src.may_filter.keys()
if config.checkbox_to_value(kwargs.get('%s_filter_%s' % (cur_id, k)))])
for attr in ['search_fallback', 'enable_recentsearch', 'enable_backlog', 'enable_scheduled_backlog']:
for attr in ['search_fallback', 'enable_recentsearch', 'enable_backlog', 'enable_scheduled_backlog',
'scene_only', 'scene_loose', 'scene_loose_active',
'scene_rej_nuked', 'scene_nuked_active',]:
setattr(nzb_src, attr, config.checkbox_to_value(kwargs.get(cur_id + '_' + attr)))
for attr in ['scene_or_contain', 'search_mode']:
attr_check = '%s_%s' % (cur_id, attr)
if attr_check in kwargs:
setattr(nzb_src, attr, str(kwargs.get(attr_check) or '').strip())
else:
new_provider.enabled = True
_ = new_provider.caps # when adding a custom, trigger server_type update
new_provider.enabled = False
sickbeard.newznabProviderList.append(new_provider)
active_ids.append(cur_id)
@ -6396,10 +6450,21 @@ class ConfigProviders(Config):
# if it already exists then update it
if cur_id in torrent_rss_sources:
torrent_rss_sources[cur_id].name = cur_name
torrent_rss_sources[cur_id].url = cur_url
torrss_src = torrent_rss_sources[cur_id]
torrss_src.name = cur_name
torrss_src.url = cur_url
if cur_cookies:
torrent_rss_sources[cur_id].cookies = cur_cookies
torrss_src.cookies = cur_cookies
for attr in ['scene_only', 'scene_loose', 'scene_loose_active',
'scene_rej_nuked', 'scene_nuked_active']:
setattr(torrss_src, attr, config.checkbox_to_value(kwargs.get(cur_id + '_' + attr)))
for attr in ['scene_or_contain']:
attr_check = '%s_%s' % (cur_id, attr)
if attr_check in kwargs:
setattr(torrss_src, attr, str(kwargs.get(attr_check) or '').strip())
else:
sickbeard.torrentRssProviderList.append(new_provider)
@ -6447,9 +6512,6 @@ class ConfigProviders(Config):
src_id_prefix + attr, '').split(',')]))
torrent_src.url_home = ([url_edit], [])[not url_edit]
for attr in [x for x in ['username', 'uid'] if hasattr(torrent_src, x)]:
setattr(torrent_src, attr, str(kwargs.get(src_id_prefix + attr, '')).strip())
for attr in [x for x in ['password', 'api_key', 'passkey', 'digest', 'hash'] if hasattr(torrent_src, x)]:
key = str(kwargs.get(src_id_prefix + attr, '')).strip()
if 'password' == attr:
@ -6457,25 +6519,15 @@ class ConfigProviders(Config):
elif not starify(key, True):
setattr(torrent_src, attr, key)
attr = 'ratio'
if hasattr(torrent_src, '_seed_' + attr) and src_id_prefix + attr in kwargs:
setattr(torrent_src, '_seed_' + attr, kwargs.get(src_id_prefix + attr, '').strip() or None)
for attr in filter(lambda a: hasattr(torrent_src, a), [
'username', 'uid', '_seed_ratio', 'scene_or_contain'
]):
setattr(torrent_src, attr, str(kwargs.get(src_id_prefix + attr.replace('_seed_', ''), '')).strip())
for attr in [x for x in ['minseed', 'minleech'] if hasattr(torrent_src, x)]:
setattr(torrent_src, attr, config.to_int(str(kwargs.get(src_id_prefix + attr)).strip()))
for attr in [x for x in ['confirmed', 'freeleech', 'reject_m2ts', 'enable_recentsearch',
'enable_backlog', 'search_fallback', 'enable_scheduled_backlog']
if hasattr(torrent_src, x) and src_id_prefix + attr in kwargs]:
setattr(torrent_src, attr, config.checkbox_to_value(kwargs.get(src_id_prefix + attr)))
attr = 'seed_time'
if hasattr(torrent_src, attr) and src_id_prefix + attr in kwargs:
setattr(torrent_src, attr, config.to_int(str(kwargs.get(src_id_prefix + attr)).strip()))
attr = 'search_mode'
if hasattr(torrent_src, attr):
setattr(torrent_src, attr, str(kwargs.get(src_id_prefix + attr, '')).strip() or 'eponly')
for attr in filter(lambda a: hasattr(torrent_src, a), [
'minseed', 'minleech', 'seed_time'
]):
setattr(torrent_src, attr, config.to_int(str(kwargs.get(src_id_prefix + attr, '')).strip()))
attr = 'filter'
if hasattr(torrent_src, attr):
@ -6483,6 +6535,19 @@ class ConfigProviders(Config):
[k for k in torrent_src.may_filter.keys()
if config.checkbox_to_value(kwargs.get('%sfilter_%s' % (src_id_prefix, k)))])
for attr in filter(lambda a: hasattr(torrent_src, a), [
'confirmed', 'freeleech', 'reject_m2ts', 'enable_recentsearch',
'enable_backlog', 'search_fallback', 'enable_scheduled_backlog',
'scene_only', 'scene_loose', 'scene_loose_active',
'scene_rej_nuked', 'scene_nuked_active'
]):
setattr(torrent_src, attr, config.checkbox_to_value(kwargs.get(src_id_prefix + attr)))
for attr, default in filter(lambda (a, _): hasattr(torrent_src, a), [
('search_mode', 'eponly'),
]):
setattr(torrent_src, attr, str(kwargs.get(src_id_prefix + attr) or default).strip())
# update nzb source settings
for nzb_src in [src for src in sickbeard.providers.sortedProviderList() if
sickbeard.GenericProvider.NZB == src.providerType]:
@ -6498,18 +6563,21 @@ class ConfigProviders(Config):
if hasattr(nzb_src, attr):
setattr(nzb_src, attr, str(kwargs.get(src_id_prefix + attr, '')).strip() or None)
attr = 'search_mode'
if hasattr(nzb_src, attr):
setattr(nzb_src, attr, str(kwargs.get(src_id_prefix + attr, '')).strip() or 'eponly')
attr = 'enable_recentsearch'
if hasattr(nzb_src, attr):
setattr(nzb_src, attr, config.checkbox_to_value(kwargs.get(src_id_prefix + attr)) or
not getattr(nzb_src, 'supports_backlog', True))
for attr in [x for x in ['search_fallback', 'enable_backlog', 'enable_scheduled_backlog'] if hasattr(nzb_src, x)]:
for attr in filter(lambda x: hasattr(nzb_src, x),
['search_fallback', 'enable_backlog', 'enable_scheduled_backlog',
'scene_only', 'scene_loose', 'scene_loose_active',
'scene_rej_nuked', 'scene_nuked_active']):
setattr(nzb_src, attr, config.checkbox_to_value(kwargs.get(src_id_prefix + attr)))
for (attr, default) in [('scene_or_contain', ''), ('search_mode', 'eponly')]:
if hasattr(nzb_src, attr):
setattr(nzb_src, attr, str(kwargs.get(src_id_prefix + attr) or default).strip())
sickbeard.NEWZNAB_DATA = '!!!'.join([x.config_str() for x in sickbeard.newznabProviderList])
sickbeard.PROVIDER_ORDER = provider_list

File diff suppressed because it is too large Load diff