Change improve the Manage Searches error stats UI and backend functions.

Add the improved error handling to torrent providers.
Change cache_db to always include db upgrade statements when running SG for the first time.
Change split newznab limit logic into a reusable function, hit_limit_update(), and use it in the BTN provider.
Change tweak CSS to neaten button spacing.
Add the actual time at which the current limit will expire to the UI.
Change terminology from "errors" to "failures".
Change improve BTN error handling.
Change ensure provider name is output to log at times where it wasn't.
Change ensure failed request URLs and POST params are output to log.
Add time of last failure + type to should_skip log message.
Change DRY and simplify code for improved readability.
Change occurrences of "error" to "fail" for consistency.
Add tmr limit handling to omg and change log_failure_url level to warning.
Change log the failed URL when an API hit limit is reached.
Change "hit" to the more universally generic "tmr" - Too Many Requests.
Change rename db columns containing "hit_" to "tmr_".
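
As a rough sketch of the back-off idea behind these changes (a simplified, standalone illustration using the fail_times table and index logic from the generic provider diff further down; the constant name and the clamping are illustrative, not the committed implementation):

from datetime import timedelta

# Escalating wait periods (hours, minutes) keyed by a failure index,
# mirroring the fail_times table added to GenericProvider in this commit.
FAIL_TIMES = {1: (0, 15), 2: (0, 30), 3: (1, 0), 4: (2, 0),
              5: (3, 0), 6: (6, 0), 7: (12, 0), 8: (24, 0)}

def fail_time_index(failure_count, base_limit=2):
    # The first couple of failures are forgiven; beyond that the index grows
    # with the count and is capped at the largest defined wait (24 hours).
    return min(max(failure_count - base_limit, 1), len(FAIL_TIMES))

def wait_time(failure_count):
    hours, minutes = FAIL_TIMES[fail_time_index(failure_count)]
    return timedelta(hours=hours, minutes=minutes)

print(wait_time(3), wait_time(10))  # 0:15:00 and 1 day, 0:00:00
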
JackDandy 2018-01-15 17:54:36 +00:00
parent f9cc6ed330
commit 0ead7771de
66 changed files with 1003 additions and 547 deletions

View file

@ -4,6 +4,9 @@
* Change improve media process to parse anime format 'Show Name 123 - 001 - Ep 1 name'
* Add free space stat (if obtainable) of parent folder(s) to footer
* Add option "Display disk free" to general config/interface page (default enabled)
* Add a provider error table to page Manage/Media Search
* Add failure handling, skip provider for x hour(s) depending on count of failures
* Add detection of Too Many Requests (Supporting providers UC and BTN)
[develop changelog]
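
To illustrate the Too Many Requests detection noted above, a minimal standalone sketch of the period/unit parsing that tmr_limit_update performs in the generic provider diff below (simplified; the tmr_wait helper name is hypothetical and this is not the committed code):

from datetime import timedelta

def tmr_wait(period, unit):
    # Map a server-supplied wait such as ('30', 'min') or ('1', 'h') to a
    # timedelta; an unrecognised unit returns None so the caller can fall
    # back to its own escalating wait table instead.
    value = int(period)
    if unit in ('s', 'sec', 'secs', 'seconds', 'second'):
        return timedelta(seconds=value)
    if unit in ('m', 'min', 'mins', 'minutes', 'minute'):
        return timedelta(minutes=value)
    if unit in ('h', 'hr', 'hrs', 'hours', 'hour'):
        return timedelta(hours=value)
    if unit in ('d', 'days', 'day'):
        return timedelta(days=value)
    return None

print(tmr_wait('1', 'h'))  # 1:00:00, e.g. BTN's 150 calls/hr limit
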

View file

@ -762,6 +762,60 @@ a.whitelink{
}
/* TABLE BACKGROUND color */
.provider-failures.hover-highlight td:before,
.provider-failures.focus-highlight td:before{
background:#222
}
/* ODD ZEBRA STRIPE color (needs zebra widget) */
.provider-failures.hover-highlight .odd td:before,
.provider-failures.hover-highlight .odd th:before,
.provider-failures.focus-highlight .odd td:before,
.provider-failures.focus-highlight .odd th:before{
background:#333
}
/* EVEN ZEBRA STRIPE color (needs zebra widget) */
.provider-failures.hover-highlight .even td:before,
.provider-failures.hover-highlight .even th:before,
.provider-failures.focus-highlight .even td:before,
.provider-failures.focus-highlight .even th:before{
background-color:#2e2e2e
}
/* HOVER ROW highlight colors */
.provider-failures.hover-highlight tbody > tr:hover > td, /* override tablesorter theme row hover */
.provider-failures.hover-highlight tbody > tr.odd:hover > td,
.provider-failures.hover-highlight tbody > tr.even:hover > td{
background-color:#282828
}
/* HOVER COLUMN highlight colors */
.provider-failures.hover-highlight tbody tr th:hover::after,
.provider-failures.hover-highlight tbody tr td:hover::after{
background-color:#282828
}
/* FOCUS ROW highlight color (touch devices) */
.provider-failures.focus-highlight td:focus::before,
.provider-failures.focus-highlight th:focus::before{
background-color:#181818
}
/* FOCUS COLUMN highlight color (touch devices) */
.provider-failures.focus-highlight td:focus::after,
.provider-failures.focus-highlight th:focus::after{
background-color:#181818
}
/* FOCUS CELL highlight color */
.provider-failures.focus-highlight th:focus,
.provider-failures.focus-highlight td:focus,
.provider-failures.focus-highlight .odd th:focus,
.provider-failures.focus-highlight .odd td:focus,
.provider-failures.focus-highlight .even th:focus,
.provider-failures.focus-highlight .even td:focus{
background-color:#181818;
color:#ddd
}
/* =======================================================================
404.tmpl
========================================================================== */

View file

@ -742,6 +742,60 @@ a.whitelink{
color:#000
}
/* TABLE BACKGROUND color */
.provider-failures.hover-highlight td:before,
.provider-failures.focus-highlight td:before{
background:#fff
}
/* ODD ZEBRA STRIPE color (needs zebra widget) */
.provider-failures.hover-highlight .odd th:before,
.provider-failures.hover-highlight .odd td:before,
.provider-failures.focus-highlight .odd th:before,
.provider-failures.focus-highlight .odd td:before{
background:#f5f1e4
}
/* EVEN ZEBRA STRIPE color (needs zebra widget) */
.provider-failures.hover-highlight .even th:before,
.provider-failures.hover-highlight .even td:before,
.provider-failures.focus-highlight .even th:before,
.provider-failures.focus-highlight .even td:before{
background-color:#dfdacf;
}
/* HOVER ROW highlight colors */
.provider-failures.hover-highlight tbody > tr:hover > td, /* override tablesorter theme row hover */
.provider-failures.hover-highlight tbody > tr.odd:hover > td,
.provider-failures.hover-highlight tbody > tr.even:hover > td{
background-color:#f4f3c2
}
/* HOVER COLUMN highlight colors */
.provider-failures.hover-highlight tbody tr th:hover::after,
.provider-failures.hover-highlight tbody tr td:hover::after{
background-color:#f4f3c2
}
/* FOCUS ROW highlight color (touch devices) */
.provider-failures.focus-highlight th:focus::before,
.provider-failures.focus-highlight td:focus::before{
background-color:#dfdead
}
/* FOCUS COLUMN highlight color (touch devices) */
.provider-failures.focus-highlight th:focus::after,
.provider-failures.focus-highlight td:focus::after{
background-color:#dfdead
}
/* FOCUS CELL highlight color */
.provider-failures.focus-highlight th:focus,
.provider-failures.focus-highlight td:focus,
.provider-failures.focus-highlight .odd th:focus,
.provider-failures.focus-highlight .odd td:focus,
.provider-failures.focus-highlight .even th:focus,
.provider-failures.focus-highlight .even td:focus{
background-color:#dfdead;
color:#222
}
/* =======================================================================
404.tmpl
========================================================================== */
@ -1381,8 +1435,8 @@ tablesorter.css
}
thead.tablesorter-stickyHeader{
border-top:2px solid #fff;
border-bottom:2px solid #fff
border-top:2px solid #ddd;
border-bottom:2px solid #ddd
}
/* Zebra Widget - row alternating colors */
@ -1404,7 +1458,7 @@ thead.tablesorter-stickyHeader{
}
.tablesorter tfoot tr{
color:#fff;
color:#ddd;
text-align:center;
text-shadow:-1px -1px 0 rgba(0, 0, 0, 0.3);
background-color:#333;

View file

@ -3191,6 +3191,85 @@ input.get_less_eps{
display:none
}
#media-search .section{
padding-bottom:10px
}
#media-search .btn{
margin:0 6px 0 0;
min-width:70px
}
#media-search .btn.shows-more,
#media-search .btn.shows-less{
margin:6px 6px 6px 0;
}
#media-search .btn.provider-retry{
margin:6px 0 6px 4px;
}
.tablesorter.provider-failures{width:auto;clear:both;margin-bottom:10px}
.tablesorter.provider-failures > tbody > tr.tablesorter-childRow td{display:none}
.tablesorter.provider-failures.tablesorter > tbody > tr{background-color:transparent}
.provider-failures.hover-highlight th:hover::after,
.provider-failures.hover-highlight td:hover::after,
.provider-failures.focus-highlight th:focus::after,
.provider-failures.focus-highlight td:focus::after{
content:'';
position:absolute;
width:100%;
height:999em;
left:0;
top:-555em;
z-index:-1
}
.provider-failures.focus-highlight th:focus::before,
.provider-failures.focus-highlight td:focus::before{
content:'';
position:absolute;
width:999em;
height:100%;
left:-555em;
top:0;
z-index:-2
}
/* required styles */
.provider-failures.hover-highlight,
.provider-failures.focus-highlight{
overflow:hidden
}
.provider-failures.hover-highlight th,
.provider-failures.hover-highlight td,
.provider-failures.focus-highlight th,
.provider-failures.focus-highlight td{
position:relative;
outline:0
}
/* override the tablesorter theme styling */
.provider-failures.hover-highlight,
.provider-failures.hover-highlight tbody > tr > td,
.provider-failures.focus-highlight,
.provider-failures.focus-highlight tbody > tr > td,
/* override zebra styling */
.provider-failures.hover-highlight tbody tr.even > th,
.provider-failures.hover-highlight tbody tr.even > td,
.provider-failures.hover-highlight tbody tr.odd > th,
.provider-failures.hover-highlight tbody tr.odd > td,
.provider-failures.focus-highlight tbody tr.even > th,
.provider-failures.focus-highlight tbody tr.even > td,
.provider-failures.focus-highlight tbody tr.odd > th,
.provider-failures.focus-highlight tbody tr.odd > td{
background:transparent
}
/* table background positioned under the highlight */
.provider-failures.hover-highlight td:before,
.provider-failures.focus-highlight td:before{
content:'';
position:absolute;
width:100%;
height:100%;
left:0;
top:0;
z-index:-3
}
/* =======================================================================
404.tmpl
========================================================================== */
@ -4265,11 +4344,9 @@ tablesorter.css
#display-show .tablesorter{
width:100%;
margin-right:auto;
margin-left:auto;
color:#000;
margin-left:auto
/* text-align:left;*/
background-color:#ddd/*;
border-spacing:0*/
/* border-spacing:0*/
}
#display-show .tablesorter{
@ -4317,20 +4394,6 @@ tablesorter.css
cursor:default
}
thead.tablesorter-stickyHeader{
border-top:2px solid #ddd;
border-bottom:2px solid #ddd
}
/* Zebra Widget - row alternating colors */
.tablesorter tr.odd, .sickbeardTable tr.odd{
background-color:#f5f1e4
}
.tablesorter tr.even, .sickbeardTable tr.even{
background-color:#dfdacf
}
/* filter widget */
.tablesorter .filtered{
display:none
@ -4346,9 +4409,7 @@ thead.tablesorter-stickyHeader{
.tablesorter tr.tablesorter-filter-row,
.tablesorter tr.tablesorter-filter-row td{
text-align:center;
background:#eee;
border-bottom:1px solid #ddd
text-align:center
}
/* optional disabled input styling */
@ -4362,10 +4423,7 @@ thead.tablesorter-stickyHeader{
}*/
.tablesorter tfoot tr{
color:#ddd;
text-align:center;
text-shadow:-1px -1px 0 rgba(0, 0, 0, 0.3);
background-color:#333;
border-collapse:collapse
}

View file

@ -65,11 +65,7 @@ except AttributeError:
diskfree, min_output = df()
if min_output:
avail = ', '.join(['%s <span class="footerhighlight">%s</span>' % (drive, free) for (drive, free) in diskfree])
%>
<style>
.stat-table{margin:0 auto}
.stat-table > tbody > tr > td{padding:0 5px}
</style>
%>#slurp#
##
<span class="footerhighlight">$shows_total</span> shows (<span class="footerhighlight">$shows_active</span> active)
| <span class="footerhighlight">$ep_downloaded</span><%=
@ -87,6 +83,10 @@ if min_output:
<br>free space&nbsp;&nbsp;$avail
#else
<div class="table-responsive">
<style>
.stat-table{margin:0 auto}
.stat-table > tbody > tr > td{padding:0 5px}
</style>
<table class="stat-table" cellspacing="5" cellpadding="5">
<caption style="display:none">Free space stats for volume/path</caption>
<tbody>

View file

@ -8,169 +8,230 @@
##
#import os.path
#include $os.path.join($sickbeard.PROG_DIR, 'gui/slick/interfaces/default/inc_top.tmpl')
<input type="hidden" id="sbRoot" value="$sbRoot">
<input type="hidden" id="sbRoot" value="$sbRoot">
<script type="text/javascript" src="$sbRoot/js/plotTooltip.js?v=$sbPID"></script>
<script type="text/javascript" src="$sbRoot/js/manageSearches.js?v=$sbPID"></script>
<div id="content800">
<div id="media-search" class="align-left">
#if $varExists('header')
<h1 class="header">$header</h1>
#else
<h1 class="title">$title</h1>
#end if
<div id="summary2" class="align-left">
<div id="backlog-search" class="section">
<h3>Backlog Search:</h3>
<a id="forcebacklog" class="btn#if $standard_backlog_running or $backlog_is_active# disabled#end if#" href="$sbRoot/manage/manageSearches/forceBacklog"><i class="sgicon-play"></i> Force</a>
<a id="pausebacklog" class="btn" href="$sbRoot/manage/manageSearches/pauseBacklog?paused=#if $backlog_paused then "0" else "1"#"><i class="#if $backlog_paused then "sgicon-play" else "sgicon-pause"#"></i> #if $backlog_paused then "Unpause" else "Pause"#</a>
#if $backlog_paused then 'Paused: ' else ''#
#if $backlog_paused
Paused:
#end if##slurp#
#if not $backlog_running and not $backlog_is_active:
Not in progress<br />
Not in progress
#else
Currently running#if $backlog_running_type != "None"# ($backlog_running_type)#end if#<br />
Currently running#if $backlog_running_type != "None"# ($backlog_running_type)#end if#
#end if
<br />
</div>
<div id="recent-search" class="section">
<h3>Recent Search:</h3>
<a id="recentsearch" class="btn#if $recent_search_status# disabled#end if#" href="$sbRoot/manage/manageSearches/forceSearch"><i class="sgicon-play"></i> Force</a>
#if not $recent_search_status
Not in progress<br />
Not in progress
#else
In Progress<br />
In Progress
#end if
<br />
</div>
<div id="propers-search" class="section">
<h3>Find Propers Search:</h3>
<a id="propersearch" class="btn#if $find_propers_status# disabled#end if#" href="$sbRoot/manage/manageSearches/forceFindPropers"><i class="sgicon-play"></i> Force</a>
#if not $find_propers_status
Not in progress<br />
Not in progress
#else
In Progress<br />
In Progress
#end if
<br />
#if $provider_errors
<h3>Provider Errors:</h3><br>
#for $prov in $provider_error_stats
#if $len($prov['errors'])
$prov['name']<input type="button" class="shows-more btn" id="$prov['name']-btn-more" value="Expand" style="display:none"><input type="button" class="shows-less btn" id="$prov['name']-btn-less" value="Collapse">
#if $prov['next_try']
#set nt = $str($prov['next_try']).split('.', 2)
Next try in: $nt[0] <input type="button" class="prov-retry btn" id="$prov['prov_id']-btn-retry" value="Retry">
#end if
<br>
<table class="sickbeardTable manageTable" cellspacing="1" border="0" cellpadding="0">
<thead><th>Day</th><th>http</th><th>connection</th><th>connection timeout</th><th>timeout</th><th>unknown</th><th>no data</th>
#if $prov['has_limit']
<th>Hit Limit</th>
#end if
</div>
<div id="provider-failures" class="section">
<h3>Provider Failures:</h3>
#if not $provider_fails
<p>No current failures. Failure stats display here when appropriate.</p>
#else
<p>Some providers can be often down over periods, SickGear will back off then retry connecting at a later time</p>
#for $prov in $provider_fail_stats
#if $len($prov['fails'])
<input type="button" class="shows-more btn" id="$prov['name']-btn-more" value="Expand" style="display:none"><input type="button" class="shows-less btn" id="$prov['name']-btn-less" value="Collapse"><img src="$sbRoot/images/providers/$prov['prov_img']" width="16" height="16" style="margin:0 6px" />$prov['name']
#if $prov['active']
#if $prov['next_try']
#set nt = $str($prov['next_try']).split('.', 2)
... is blocked until $sbdatetime.sbdatetime.sbftime($sbdatetime.sbdatetime.now() + $prov['next_try'], markup=True) (in $nt[0]) <input type="button" class="provider-retry btn" id="$prov['prov_id']-btn-retry" value="Ignore block on next search">
#end if
#else
... is not enabled
#end if
<table class="manageTable provider-failures tablesorter hover-highlight focus-highlight text-center" cellspacing="0" border="0" cellpadding="0">
<thead>
<tr>
<th class="text-center" style="width:13em;padding-right:20px">period of 1hr</th>
<th class="text-center" style="padding-right:20px">server/timeout</th>
<th class="text-center" style="padding-right:20px">network</th>
<th class="text-center" style="padding-right:20px">no data</th>
<th class="text-center" style="padding-right:20px">other</th>
#if $prov['has_limit']
<th class="text-center" style="padding-right:20px">hit limit</th>
#end if
</tr>
</thead>
<tbody>
#set $row = 0
#for $error in $prov['errors']
<tr class="#echo ('odd', 'even')[$row % 2]##set $row+=1#">
<td style="color:white">$sbdatetime.sbdatetime.sbfdate($error['date'])</td>
<td style="color:white">$error['http'].get('count', 0)</td>
<td style="color:white">$error['connection'].get('count', 0)</td>
<td style="color:white">$error['connection_timeout'].get('count', 0)</td>
<td style="color:white">$error['timeout'].get('count', 0)</td>
<td style="color:white">$error['unknown'].get('count', 0)</td>
<td style="color:white">$error['nodata'].get('count', 0)</td>
#if $prov['has_limit']
<td style="color:white">$error.get('limit', {}).get('count', 0)</td>
#end if
</tr>
#end for
#set $day = []
#for $fail in $prov['fails']
#set $child = True
#if $fail['date'] not in $day
#set $day += [$fail['date']]
#set $child = False
#end if
#slurp#
<tr#if $fail['multirow'] and $child# class="tablesorter-childRow"#end if#>
#if $fail['multirow']
#if not $child
<td><a href="#" class="provider-fail-parent-toggle" title="Totals (expand for detail)">$sbdatetime.sbdatetime.sbfdate($fail['date_time'])</a></td>
#else
<td>$sbdatetime.sbdatetime.sbftime($fail['date_time'], markup=True)</td>
#end if
#else
<td>$sbdatetime.sbdatetime.sbfdatetime($fail['date_time'], markup=True)</td>
#end if
#set $blank = '-'
#set $title=None
#if $fail['http']['count']
#set $title=$fail['http']['code']
#end if
<td>#if $fail['http']['count']#<span title="#if $child#$title#else#Expand for fail codes#end if#">$fail['http']['count']</span>#else#$blank#end if# / #echo $fail['timeout'].get('count', 0) or $blank#</td>
<td>#echo ($fail['connection'].get('count', 0) + $fail['connection_timeout'].get('count', 0)) or $blank#</td>
<td>#echo $fail['nodata'].get('count', 0) or $blank#</td>
<td>#echo $fail['other'].get('count', 0) or $blank#</td>
#if $prov['has_limit']
<td>#echo $fail.get('limit', {}).get('count', 0) or $blank#</td>
#end if
</tr>
#end for
</tbody>
</table>
#end if
#end for
<br>
<br /><br />
#end if
#end for
#end if
<h3>Search Queue:</h3>
</div>
<div id="search-queues" class="section">
<h3>Search Queues:</h3>
#if $queue_length['backlog'] or $queue_length['manual'] or $queue_length['failed']
<input type="button" class="show-all-more btn" id="all-btn-more" value="Expand All"><input type="button" class="show-all-less btn" id="all-btn-less" value="Collapse All"><br>
#end if
<br>
Recent: <i>$queue_length['recent'] item$sickbeard.helpers.maybe_plural($queue_length['recent'])</i><br><br>
Proper: <i>$queue_length['proper'] item$sickbeard.helpers.maybe_plural($queue_length['proper'])</i><br><br>
Backlog: <i>$len($queue_length['backlog']) item$sickbeard.helpers.maybe_plural($len($queue_length['backlog']))</i>
<div id="queue-recent" class="section">
Recent: <i>$queue_length['recent'] item$sickbeard.helpers.maybe_plural($queue_length['recent'])</i>
</div>
<div id="queue-proper" class="section">
Proper: <i>$queue_length['proper'] item$sickbeard.helpers.maybe_plural($queue_length['proper'])</i>
</div>
<div id="queue-backlog" class="section">
Backlog: <i>$len($queue_length['backlog']) item$sickbeard.helpers.maybe_plural($len($queue_length['backlog']))</i>
#if $queue_length['backlog']
<input type="button" class="shows-more btn" id="backlog-btn-more" value="Expand" #if not $queue_length['backlog']# style="display:none" #end if#><input type="button" class="shows-less btn" id="backlog-btn-less" value="Collapse" style="display:none"><br>
<table class="sickbeardTable manageTable" cellspacing="1" border="0" cellpadding="0" style="display:none">
<thead></thead>
<tbody>
#set $row = 0
#for $cur_item in $queue_length['backlog']:
#set $search_type = 'On Demand'
#if $cur_item['standard_backlog']:
#if $cur_item['forced']:
#set $search_type = 'Forced'
#else
#set $search_type = 'Scheduled'
#end if
#if $cur_item['torrent_only']:
#set $search_type += ', Torrent Only'
#end if
#if $cur_item['limited_backlog']:
#set $search_type += ' (Limited)'
#else
#set $search_type += ' (Full)'
#end if
#end if
<tr class="#echo ('odd', 'even')[$row % 2]##set $row+=1#">
<td style="width:80%;text-align:left;color:white">
<a class="whitelink" href="$sbRoot/home/displayShow?show=$cur_item['indexerid']">$cur_item['name']</a> - $sickbeard.helpers.make_search_segment_html_string($cur_item['segment'])
</td>
<td style="width:20%;text-align:center;color:white">$search_type</td>
</tr>
#end for
</tbody>
</table>
#else
<br>
<input type="button" class="shows-more btn" id="backlog-btn-more" value="Expand" #if not $queue_length['backlog']# style="display:none" #end if#><input type="button" class="shows-less btn" id="backlog-btn-less" value="Collapse" style="display:none"><br>
<table class="sickbeardTable manageTable" cellspacing="1" border="0" cellpadding="0" style="display:none">
<thead></thead>
<tbody>
#set $row = 0
#for $cur_item in $queue_length['backlog']:
#set $search_type = 'On Demand'
#if $cur_item['standard_backlog']:
#if $cur_item['forced']:
#set $search_type = 'Forced'
#else
#set $search_type = 'Scheduled'
#end if
#if $cur_item['torrent_only']:
#set $search_type += ', Torrent Only'
#end if
#if $cur_item['limited_backlog']:
#set $search_type += ' (Limited)'
#else
#set $search_type += ' (Full)'
#end if
#end if
<tr class="#echo ('odd', 'even')[$row % 2]##set $row+=1#">
<td style="width:80%;text-align:left;color:white">
<a class="whitelink" href="$sbRoot/home/displayShow?show=$cur_item['indexerid']">$cur_item['name']</a> - $sickbeard.helpers.make_search_segment_html_string($cur_item['segment'])
</td>
<td style="width:20%;text-align:center;color:white">$search_type</td>
</tr>
#end for
</tbody>
</table>
#end if
<br>
Manual: <i>$len($queue_length['manual']) item$sickbeard.helpers.maybe_plural($len($queue_length['manual']))</i>
</div>
<div id="queue-manual" class="section">
Manual: <i>$len($queue_length['manual']) item$sickbeard.helpers.maybe_plural($len($queue_length['manual']))</i>
#if $queue_length['manual']
<input type="button" class="shows-more btn" id="manual-btn-more" value="Expand" #if not $queue_length['manual']# style="display:none" #end if#><input type="button" class="shows-less btn" id="manual-btn-less" value="Collapse" style="display:none"><br>
<table class="sickbeardTable manageTable" cellspacing="1" border="0" cellpadding="0" style="display:none">
<thead></thead>
<tbody>
#set $row = 0
#for $cur_item in $queue_length['manual']:
<tr class="#echo ('odd', 'even')[$row % 2]##set $row+=1#">
<td style="width:100%;text-align:left;color:white">
<a class="whitelink" href="$sbRoot/home/displayShow?show=$cur_item['indexerid']">$cur_item['name']</a> - $sickbeard.helpers.make_search_segment_html_string($cur_item['segment'])
</td>
</tr>
#end for
</tbody>
</table>
#else
<br>
<input type="button" class="shows-more btn" id="manual-btn-more" value="Expand" #if not $queue_length['manual']# style="display:none" #end if#><input type="button" class="shows-less btn" id="manual-btn-less" value="Collapse" style="display:none"><br>
<table class="sickbeardTable manageTable" cellspacing="1" border="0" cellpadding="0" style="display:none">
<thead></thead>
<tbody>
#set $row = 0
#for $cur_item in $queue_length['manual']:
<tr class="#echo ('odd', 'even')[$row % 2]##set $row+=1#">
<td style="width:100%;text-align:left;color:white">
<a class="whitelink" href="$sbRoot/home/displayShow?show=$cur_item['indexerid']">$cur_item['name']</a> - $sickbeard.helpers.make_search_segment_html_string($cur_item['segment'])
</td>
</tr>
#end for
</tbody>
</table>
#end if
<br>
Failed: <i>$len($queue_length['failed']) item$sickbeard.helpers.maybe_plural($len($queue_length['failed']))</i>
</div>
<div id="queue-failed" class="section">
Failed: <i>$len($queue_length['failed']) item$sickbeard.helpers.maybe_plural($len($queue_length['failed']))</i>
#if $queue_length['failed']
<input type="button" class="shows-more btn" id="failed-btn-more" value="Expand" #if not $queue_length['failed']# style="display:none" #end if#><input type="button" class="shows-less btn" id="failed-btn-less" value="Collapse" style="display:none"><br>
<table class="sickbeardTable manageTable" cellspacing="1" border="0" cellpadding="0" style="display:none">
<thead></thead>
<tbody>
#set $row = 0
#for $cur_item in $queue_length['failed']:
<tr class="#echo ('odd', 'even')[$row % 2]##set $row+=1#">
<td style="width:100%;text-align:left;color:white">
<a class="whitelink" href="$sbRoot/home/displayShow?show=$cur_item['indexerid']">$cur_item['name']</a> - $sickbeard.helpers.make_search_segment_html_string($cur_item['segment'])
</td>
</tr>
#end for
</tbody>
</table>
#else
<br>
<input type="button" class="shows-more btn" id="failed-btn-more" value="Expand" #if not $queue_length['failed']# style="display:none" #end if#><input type="button" class="shows-less btn" id="failed-btn-less" value="Collapse" style="display:none"><br>
<table class="sickbeardTable manageTable" cellspacing="1" border="0" cellpadding="0" style="display:none">
<thead></thead>
<tbody>
#set $row = 0
#for $cur_item in $queue_length['failed']:
<tr class="#echo ('odd', 'even')[$row % 2]##set $row+=1#">
<td style="width:100%;text-align:left;color:white">
<a class="whitelink" href="$sbRoot/home/displayShow?show=$cur_item['indexerid']">$cur_item['name']</a> - $sickbeard.helpers.make_search_segment_html_string($cur_item['segment'])
</td>
</tr>
#end for
</tbody>
</table>
#end if
</div>
</div>
</div>
#include $os.path.join($sickbeard.PROG_DIR, 'gui/slick/interfaces/default/inc_bottom.tmpl')

View file

@ -1,36 +1,36 @@
$(document).ready(function() {
$(function(){
$('#recentsearch,#propersearch').click(function(){
$(this).addClass('disabled');
})
});
$('#forcebacklog,#forcefullbacklog').click(function(){
$('#forcebacklog,#forcefullbacklog').addClass('disabled');
$('#pausebacklog').removeClass('disabled');
})
});
$('#pausebacklog').click(function(){
$(this).addClass('disabled');
})
});
$('.show-all-less').click(function(){
$(this).nextAll('table').hide();
$(this).nextAll('input.shows-more').show();
$(this).nextAll('input.shows-less').hide();
})
});
$('.show-all-more').click(function(){
$(this).nextAll('table').show();
$(this).nextAll('input.shows-more').hide();
$(this).nextAll('input.shows-less').show();
})
});
$('.shows-less').click(function(){
$(this).nextAll('table:first').hide();
$(this).hide();
$(this).prevAll('input:first').show();
})
});
$('.shows-more').click(function(){
$(this).nextAll('table:first').show();
$(this).hide();
$(this).nextAll('input:first').show();
})
$('.prov-retry').click(function () {
});
$('.provider-retry').click(function () {
$(this).addClass('disabled');
var match = $(this).attr('id').match(/^(.+)-btn-retry$/);
$.ajax({
@ -38,7 +38,29 @@ $(document).ready(function() {
type: 'GET',
complete: function () {
window.location.reload(true);
}
}
});
})
});
});
$('.provider-failures').tablesorter({widgets : ['zebra'],
headers : { 0:{sorter:!1}, 1:{sorter:!1}, 2:{sorter:!1}, 3:{sorter:!1}, 4:{sorter:!1}, 5:{sorter:!1} }
});
$('.provider-fail-parent-toggle').click(function(){
$(this).closest('tr').nextUntil('tr:not(.tablesorter-childRow)').find('td').toggle();
return !1;
});
// Make table cell focusable
// http://css-tricks.com/simple-css-row-column-highlighting/
var focus$ = $('.focus-highlight');
if (focus$.length){
focus$.find('td, th')
.attr('tabindex', '1')
// add touch device support
.on('touchstart', function(){
$(this).focus();
});
}
});

View file

@ -17,6 +17,8 @@
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
from sickbeard import db
from collections import OrderedDict
import re
MIN_DB_VERSION = 1
MAX_DB_VERSION = 4
@ -24,107 +26,80 @@ MAX_DB_VERSION = 4
# Add new migrations at the bottom of the list; subclass the previous migration.
class InitialSchema(db.SchemaUpgrade):
def __init__(self, connection):
super(InitialSchema, self).__init__(connection)
self.queries = OrderedDict([
('base', [
'CREATE TABLE lastUpdate(provider TEXT, time NUMERIC)',
'CREATE TABLE lastSearch(provider TEXT, time NUMERIC)',
'CREATE TABLE db_version(db_version INTEGER)',
'INSERT INTO db_version(db_version) VALUES (1)',
'CREATE TABLE network_timezones(network_name TEXT PRIMARY KEY, timezone TEXT)'
]),
('consolidate_providers', [
'CREATE TABLE provider_cache(provider TEXT, name TEXT, season NUMERIC, episodes TEXT,'
' indexerid NUMERIC, url TEXT UNIQUE, time NUMERIC, quality TEXT, release_group TEXT, version NUMERIC)',
'CREATE TABLE network_conversions('
'tvdb_network TEXT PRIMARY KEY, tvrage_network TEXT, tvrage_country TEXT)',
'CREATE INDEX tvrage_idx ON network_conversions(tvrage_network, tvrage_country)'
]),
('add_backlogparts', [
'CREATE TABLE backlogparts('
'part NUMERIC NOT NULL, indexer NUMERIC NOT NULL, indexerid NUMERIC NOT NULL)',
'CREATE TABLE lastrecentsearch(name TEXT PRIMARY KEY NOT NULL, datetime NUMERIC NOT NULL)'
]),
('add_provider_fails', [
'CREATE TABLE provider_fails(prov_name TEXT, fail_type INTEGER, fail_code INTEGER, fail_time NUMERIC)',
'CREATE INDEX idx_prov_name_error ON provider_fails (prov_name)',
'CREATE UNIQUE INDEX idx_prov_errors ON provider_fails (prov_name, fail_time)',
'CREATE TABLE provider_fails_count(prov_name TEXT PRIMARY KEY,'
' failure_count NUMERIC, failure_time NUMERIC,'
' tmr_limit_count NUMERIC, tmr_limit_time NUMERIC, tmr_limit_wait NUMERIC)'
])
])
def test(self):
return self.hasTable('lastUpdate')
def execute(self):
queries = [
'CREATE TABLE lastUpdate (provider TEXT, time NUMERIC)',
'CREATE TABLE lastSearch (provider TEXT, time NUMERIC)',
'CREATE TABLE db_version (db_version INTEGER)',
'INSERT INTO db_version (db_version) VALUES (1)',
'CREATE TABLE network_timezones (network_name TEXT PRIMARY KEY, timezone TEXT)',
'CREATE TABLE network_conversions ('
'tvdb_network TEXT PRIMARY KEY, tvrage_network TEXT, tvrage_country TEXT)',
'CREATE INDEX tvrage_idx on network_conversions (tvrage_network, tvrage_country)',
'CREATE TABLE provider_cache (provider TEXT ,name TEXT, season NUMERIC, episodes TEXT,'
' indexerid NUMERIC, url TEXT UNIQUE, time NUMERIC, quality TEXT, release_group TEXT, '
'version NUMERIC)',
'CREATE TABLE IF NOT EXISTS "backlogparts" ("part" NUMERIC NOT NULL ,'
' "indexer" NUMERIC NOT NULL , "indexerid" NUMERIC NOT NULL )',
'CREATE TABLE IF NOT EXISTS "lastrecentsearch" ("name" TEXT PRIMARY KEY NOT NULL'
' , "datetime" NUMERIC NOT NULL )',
]
for query in queries:
self.connection.action(query)
self.setDBVersion(3)
self.do_query(self.queries.values())
self.setDBVersion(MAX_DB_VERSION)
def backup(self):
db.backup_database('cache.db', self.checkDBVersion())
class ConsolidateProviders(InitialSchema):
def test(self):
return self.checkDBVersion() > 1
return 1 < self.checkDBVersion()
def execute(self):
db.backup_database('cache.db', self.checkDBVersion())
if self.hasTable('provider_cache'):
self.connection.action('DROP TABLE provider_cache')
self.connection.action('CREATE TABLE provider_cache (provider TEXT, name TEXT, season NUMERIC, episodes TEXT, '
'indexerid NUMERIC, url TEXT UNIQUE, time NUMERIC, quality TEXT, release_group TEXT, '
'version NUMERIC)')
if not self.hasTable('network_conversions'):
self.connection.action('CREATE TABLE network_conversions ' +
'(tvdb_network TEXT PRIMARY KEY, tvrage_network TEXT, tvrage_country TEXT)')
self.connection.action('CREATE INDEX tvrage_idx ' +
'on network_conversions (tvrage_network, tvrage_country)')
keep_tables = set(['lastUpdate', 'lastSearch', 'db_version',
'network_timezones', 'network_conversions', 'provider_cache'])
current_tables = set(self.listTables())
remove_tables = list(current_tables - keep_tables)
for table in remove_tables:
self.connection.action('DROP TABLE [%s]' % table)
self.incDBVersion()
self.backup()
keep_tables = {'lastUpdate', 'lastSearch', 'db_version',
'network_timezones', 'network_conversions', 'provider_cache'}
# old provider_cache is dropped before re-creation
self.do_query(['DROP TABLE [provider_cache]'] + self.queries['consolidate_providers'] +
['DROP TABLE [%s]' % t for t in (set(self.listTables()) - keep_tables)])
self.finish(True)
class AddBacklogParts(ConsolidateProviders):
def test(self):
return self.checkDBVersion() > 2
return 2 < self.checkDBVersion()
def execute(self):
db.backup_database('cache.db', self.checkDBVersion())
if self.hasTable('scene_names'):
self.connection.action('DROP TABLE scene_names')
if not self.hasTable('backlogparts'):
self.connection.action('CREATE TABLE IF NOT EXISTS "backlogparts" ("part" NUMERIC NOT NULL ,'
' "indexer" NUMERIC NOT NULL , "indexerid" NUMERIC NOT NULL )')
if not self.hasTable('lastrecentsearch'):
self.connection.action('CREATE TABLE IF NOT EXISTS "lastrecentsearch" ("name" TEXT PRIMARY KEY NOT NULL'
' , "datetime" NUMERIC NOT NULL )')
if self.hasTable('scene_exceptions_refresh'):
self.connection.action('DROP TABLE scene_exceptions_refresh')
if self.hasTable('scene_exceptions'):
self.connection.action('DROP TABLE scene_exceptions')
self.connection.action('VACUUM')
self.incDBVersion()
self.backup()
self.do_query(self.queries['add_backlogparts'] +
['DROP TABLE [%s]' % t for t in ('scene_names', 'scene_exceptions_refresh', 'scene_exceptions')])
self.finish(True)
class AddProviderErrors(AddBacklogParts):
class AddProviderFailureHandling(AddBacklogParts):
def test(self):
return self.checkDBVersion() > 3
return 3 < self.checkDBVersion()
def execute(self):
db.backup_database('cache.db', self.checkDBVersion())
if not self.hasTable('providererrors'):
self.connection.action('CREATE TABLE providererrors ("prov_name" TEXT, "error_type" INTEGER, '
'"error_code" INTEGER, "error_time" NUMERIC)')
self.connection.action('CREATE INDEX idx_prov_name_error ON providererrors (prov_name)')
self.connection.action('CREATE UNIQUE INDEX idx_prov_errors ON providererrors (prov_name, error_time)')
if not self.hasTable('providererrorcount'):
self.connection.action('CREATE TABLE providererrorcount (prov_name TEXT PRIMARY KEY , '
'failure_count NUMERIC, failure_time NUMERIC, hit_limit_count NUMERIC, '
'hit_limit_time NUMERIC, hit_limit_wait NUMERIC)')
self.connection.action('VACUUM')
self.incDBVersion()
self.backup()
self.do_query(self.queries['add_provider_fails'])
self.finish()

View file

@ -432,6 +432,26 @@ class SchemaUpgrade(object):
tables.append(table[0])
return tables
def do_query(self, queries):
if not isinstance(queries, list):
queries = list(queries)
elif isinstance(queries[0], list):
queries = [item for sublist in queries for item in sublist]
for query in queries:
tbl_name = re.findall('(?i)DROP.*?TABLE.*?\[?([^\s\]]+)', query)
if tbl_name and not self.hasTable(tbl_name[0]):
continue
tbl_name = re.findall('(?i)CREATE.*?TABLE.*?\s([^\s(]+)\s*\(', query)
if tbl_name and self.hasTable(tbl_name[0]):
continue
self.connection.action(query)
def finish(self, tbl_dropped=False):
if tbl_dropped:
self.connection.action('VACUUM')
self.incDBVersion()
def MigrationCode(myDB):
schema = {
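
For context, a standalone approximation of how the do_query helper above lets upgrade classes always submit their full statement lists: a CREATE is skipped when the table already exists and a DROP is skipped when it does not (plain sqlite3 is used here for illustration; this is not the committed SchemaUpgrade code):

import re
import sqlite3

def has_table(conn, name):
    # sqlite_master lists existing tables
    return bool(conn.execute(
        "SELECT 1 FROM sqlite_master WHERE type='table' AND name=?", (name,)).fetchall())

def do_query(conn, queries):
    # Accept a single list of statements or a list of lists, as the upgrade classes pass
    if queries and isinstance(queries[0], list):
        queries = [q for sub in queries for q in sub]
    for query in queries:
        drop = re.findall(r'(?i)DROP.*?TABLE.*?\[?([^\s\]]+)', query)
        if drop and not has_table(conn, drop[0]):
            continue  # nothing to drop, skip
        create = re.findall(r'(?i)CREATE.*?TABLE.*?\s([^\s(]+)\s*\(', query)
        if create and has_table(conn, create[0]):
            continue  # already created, skip
        conn.execute(query)

conn = sqlite3.connect(':memory:')
do_query(conn, ['CREATE TABLE lastSearch(provider TEXT, time NUMERIC)',
                'DROP TABLE [scene_names]'])  # the DROP is skipped, table never existed
do_query(conn, ['CREATE TABLE lastSearch(provider TEXT, time NUMERIC)'])  # skipped, already exists
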

View file

@ -65,6 +65,8 @@ class AlphaRatioProvider(generic.TorrentProvider):
search_url = self.urls['search'] % (search_string, ('&freetorrent=1', '')[not self.freeleech])
html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(items[mode])
try:

View file

@ -38,7 +38,7 @@ class AnizbProvider(generic.NZBProvider):
for params in search_params[mode]:
search_url = '%sapi/%s' % (self.url, params and (('?q=%s', '?q=%(q)s')['q' in params] % params) or '')
data = self.cache.getRSSFeed(search_url)
data = self.cache.get_rss(search_url)
time.sleep(1.1)
cnt = len(results)

View file

@ -73,6 +73,8 @@ class BeyondHDProvider(generic.TorrentProvider):
search_url += self.urls['search'] % re.sub('[.\s]+', ' ', search_string)
data_json = self.get_url(search_url, json=True)
if self.should_skip():
return results
cnt = len(items[mode])
if data_json and 'results' in data_json and self._check_auth_from_data(data_json):

View file

@ -71,6 +71,8 @@ class BitHDTVProvider(generic.TorrentProvider):
search_url = self.urls['search'] % (search_string, self._categories_string(mode))
html = self.get_url(search_url, timeout=90)
if self.should_skip():
return results
cnt = len(items[mode])
try:

View file

@ -64,6 +64,8 @@ class BitmetvProvider(generic.TorrentProvider):
search_url = self.urls['search'] % (self._categories_string(mode, 'cat=%s'), search_string)
html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(items[mode])
try:

View file

@ -105,6 +105,8 @@ class BlutopiaProvider(generic.TorrentProvider):
self.token, '+'.join(search_string.split()), self._categories_string(mode, ''), '', '', '')
resp = self.get_url(search_url, json=True)
if self.should_skip():
return results
cnt = len(items[mode])
try:

View file

@ -56,6 +56,7 @@ class BTNProvider(generic.TorrentProvider):
self.ua = self.session.headers['User-Agent']
self.reject_m2ts = False
self.cache = BTNCache(self)
self.has_limit = True
def _authorised(self, **kwargs):
@ -67,6 +68,15 @@ class BTNProvider(generic.TorrentProvider):
raise AuthException('Must set Api key or Username/Password for %s in config provider options' % self.name)
return True
def _check_response(self, data, url, post_data=None, post_json=None):
if not self.should_skip(log_warning=False):
if data and 'Call Limit' in data:
self.tmr_limit_update('1', 'h', '150/hr %s' % data)
self.log_failure_url(url, post_data, post_json)
else:
logger.log(u'Action prematurely ended. %(prov)s server error response = %(desc)s' %
{'prov': self.name, 'desc': data}, logger.WARNING)
def _search_provider(self, search_params, age=0, **kwargs):
self._authorised()
@ -93,21 +103,19 @@ class BTNProvider(generic.TorrentProvider):
self.api_key, json.dumps(param_dct), items_per_page, offset))
try:
response = None
response, error_text = None, None
if api_up and self.api_key:
self.session.headers['Content-Type'] = 'application/json-rpc'
response = helpers.getURL(
self.url_api, post_data=json_rpc(params), session=self.session, json=True)
if not response:
api_up = False
results = self.html(mode, search_string, results)
error_text = response['error']['message']
logger.log(
('Call Limit' in error_text
and u'Action aborted because the %(prov)s 150 calls/hr limit was reached'
or u'Action prematurely ended. %(prov)s server error response = %(desc)s') %
{'prov': self.name, 'desc': error_text}, logger.WARNING)
return results
response = self.get_url(self.url_api, post_data=json_rpc(params), json=True)
# response = {'error': {'message': 'Call Limit Exceeded Test'}}
error_text = response['error']['message']
api_up = False
if 'Propers' == mode:
return results
results = self.html(mode, search_string, results)
if not results:
self._check_response(error_text, self.url_api, post_data=json_rpc(params))
return results
except AuthException:
logger.log('API looks to be down, add un/pw config detail to be used as a fallback', logger.WARNING)
except (KeyError, Exception):
@ -115,7 +123,7 @@ class BTNProvider(generic.TorrentProvider):
data_json = response and 'result' in response and response['result'] or {}
if data_json:
self.tmr_limit_count = 0
found_torrents = 'torrents' in data_json and data_json['torrents'] or {}
# We got something, we know the API sends max 1000 results at a time.
@ -134,15 +142,10 @@ class BTNProvider(generic.TorrentProvider):
for page in range(1, pages_needed + 1):
try:
response = helpers.getURL(
self.url_api, json=True, session=self.session,
post_data=json_rpc(params, results_per_page, page * results_per_page))
post_data = json_rpc(params, results_per_page, page * results_per_page)
response = self.get_url(self.url_api, json=True, post_data=post_data)
error_text = response['error']['message']
logger.log(
('Call Limit' in error_text
and u'Action prematurely ended because the %(prov)s 150 calls/hr limit was reached'
or u'Action prematurely ended. %(prov)s server error response = %(desc)s') %
{'prov': self.name, 'desc': error_text}, logger.WARNING)
self._check_response(error_text, self.url_api, post_data=post_data)
return results
except (KeyError, Exception):
data_json = response and 'result' in response and response['result'] or {}
@ -150,6 +153,7 @@ class BTNProvider(generic.TorrentProvider):
# Note that this these are individual requests and might time out individually.
# This would result in 'gaps' in the results. There is no way to fix this though.
if 'torrents' in data_json:
self.tmr_limit_count = 0
found_torrents.update(data_json['torrents'])
cnt = len(results)
@ -176,7 +180,8 @@ class BTNProvider(generic.TorrentProvider):
if self.username and self.password:
return super(BTNProvider, self)._authorised(
post_params={'login': 'Log In!'}, logged_in=(lambda y='': 'casThe' in y[0:4096]))
post_params={'login': 'Log In!'},
logged_in=(lambda y='': 'casThe' in y[0:512] and '<title>Index' in y[0:512]))
raise AuthException('Password or Username for %s is empty in config provider options' % self.name)
def html(self, mode, search_string, results):
@ -197,7 +202,10 @@ class BTNProvider(generic.TorrentProvider):
search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
search_url = self.urls['search'] % (search_string, self._categories_string(mode, 'filter_cat[%s]=1'))
html = helpers.getURL(search_url, session=self.session)
html = self.get_url(search_url, use_tmr_limit=False)
if self.should_skip(log_warning=False, use_tmr_limit=False):
return results
cnt = len(results)
try:
if not html or self._has_no_results(html):

View file

@ -64,7 +64,7 @@ class BTSceneProvider(generic.TorrentProvider):
url = self.url
response = self.get_url(url)
if not response:
if self.should_skip():
return results
form = re.findall('(?is)(<form[^>]+)', response)
@ -84,6 +84,8 @@ class BTSceneProvider(generic.TorrentProvider):
else url + self.urls['search'] % (urllib.quote_plus(search_string))
html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(items[mode])
try:

View file

@ -65,6 +65,8 @@ class DHProvider(generic.TorrentProvider):
html = self.get_url(self.urls['search'] % (
'+'.join(search_string.split()), self._categories_string(mode), ('3', '0')[not self.freeleech]))
if self.should_skip():
return results
cnt = len(items[mode])
try:

View file

@ -62,6 +62,8 @@ class ETTVProvider(generic.TorrentProvider):
self._categories_string(mode), ('%2B ', '')['Cache' == mode] + '.'.join(search_string.split()))
html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(items[mode])
try:
@ -110,6 +112,9 @@ class ETTVProvider(generic.TorrentProvider):
def get_data(self, url):
result = None
html = self.get_url(url, timeout=90)
if self.should_skip():
return result
try:
result = re.findall('(?i)"(magnet:[^"]+?)">', html)[0]
except IndexError:

View file

@ -83,6 +83,8 @@ class FanoProvider(generic.TorrentProvider):
search_url = self.urls['search'] % (search_string, self._categories_string(mode))
html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(items[mode])
try:

View file

@ -62,6 +62,8 @@ class FLProvider(generic.TorrentProvider):
html = self.get_url(self.urls['search'] % ('+'.join(search_string.split()),
self._categories_string(mode, template='cats[]=%s')))
if self.should_skip():
return results
cnt = len(items[mode])
try:

View file

@ -66,6 +66,8 @@ class FunFileProvider(generic.TorrentProvider):
search_url = self.urls['search'] % (self._categories_string(mode), search_string)
html = self.get_url(search_url, timeout=self.url_timeout)
if self.should_skip():
return results
cnt = len(items[mode])
try:

View file

@ -48,36 +48,39 @@ from sickbeard.name_parser.parser import NameParser, InvalidNameException, Inval
from sickbeard.show_name_helpers import get_show_names_all_possible
from sickbeard.sbdatetime import sbdatetime
class HaltParseException(SickBeardException):
"""Something requires the current processing to abort"""
class ProviderErrorTypes:
class ProviderFailTypes:
http = 1
connection = 2
connection_timeout = 3
timeout = 4
unknown = 5
other = 5
limit = 6
nodata = 7
names = {1: 'http', 2: 'connection', 3: 'connection_timeout', 4: 'timeout', 5: 'unknown', 6: 'limit', 7: 'nodata'}
names = {http: 'http', timeout: 'timeout',
connection: 'connection', connection_timeout: 'connection_timeout',
nodata: 'nodata', other: 'other', limit: 'limit'}
def __init__(self):
pass
class ProviderError(object):
def __init__(self, error_type=ProviderErrorTypes.unknown, code=None, error_time=None):
class ProviderFail(object):
def __init__(self, fail_type=ProviderFailTypes.other, code=None, fail_time=None):
self.code = code
self.error_type = error_type
self.error_time = (datetime.datetime.now(), error_time)[isinstance(error_time, datetime.datetime)]
self.fail_type = fail_type
self.fail_time = (datetime.datetime.now(), fail_time)[isinstance(fail_time, datetime.datetime)]
class ProviderErrorList(object):
class ProviderFailList(object):
def __init__(self, provider_name):
self.provider_name = provider_name
self._errors = []
self._fails = []
self.lock = threading.Lock()
self.clear_old()
self.load_list()
@ -85,38 +88,68 @@ class ProviderErrorList(object):
self.dirty = False
@property
def errors(self):
return self._errors
def fails(self):
return self._fails
@property
def errors_sorted(self):
error_dict = {}
b_d = {'count': 0, 'code': None}
for e in self._errors:
dd = e.error_time.date()
if ProviderErrorTypes.names[e.error_type] not in error_dict.get(dd, {}):
error_dict.setdefault(dd,
{'date': dd, 'http': b_d.copy(), 'connection': b_d.copy(),
'connection_timeout': b_d.copy(), 'timeout': b_d.copy(),
'unknown': b_d.copy(), 'limit': b_d.copy(),
'nodata': b_d.copy()})[ProviderErrorTypes.names[e.error_type]]['count'] = 1
def fails_sorted(self):
fail_dict = {}
b_d = {'count': 0}
for e in self._fails:
fail_date = e.fail_time.date()
fail_hour = e.fail_time.time().hour
date_time = datetime.datetime.combine(fail_date, datetime.time(hour=fail_hour))
if ProviderFailTypes.names[e.fail_type] not in fail_dict.get(date_time, {}):
default = {'date': str(fail_date), 'date_time': date_time, 'multirow': False}
for et in ProviderFailTypes.names.itervalues():
default[et] = b_d.copy()
fail_dict.setdefault(date_time, default)[ProviderFailTypes.names[e.fail_type]]['count'] = 1
else:
error_dict[dd][ProviderErrorTypes.names[e.error_type]]['count'] += 1
if ProviderErrorTypes.http == e.error_type:
if e.code in error_dict[dd].get(ProviderErrorTypes.names[e.error_type], {}):
error_dict[dd][ProviderErrorTypes.names[e.error_type]][e.code] += 1
fail_dict[date_time][ProviderFailTypes.names[e.fail_type]]['count'] += 1
if ProviderFailTypes.http == e.fail_type:
if e.code in fail_dict[date_time].get(ProviderFailTypes.names[e.fail_type],
{'code': {}}).get('code', {}):
fail_dict[date_time][ProviderFailTypes.names[e.fail_type]]['code'][e.code] += 1
else:
error_dict[dd][ProviderErrorTypes.names[e.error_type]][e.code] = 1
error_list = sorted([error_dict[k] for k in error_dict.iterkeys()], key=lambda x: x.get('date'), reverse=True)
return error_list
fail_dict[date_time][ProviderFailTypes.names[e.fail_type]].setdefault('code', {})[e.code] = 1
def add_error(self, error):
if isinstance(error, ProviderError):
row_count = {}
for (k, v) in fail_dict.iteritems():
row_count.setdefault(v.get('date'), 0)
if v.get('date') in row_count:
row_count[v.get('date')] += 1
for (k, v) in fail_dict.iteritems():
if 1 < row_count.get(v.get('date')):
fail_dict[k]['multirow'] = True
fail_list = sorted([fail_dict[k] for k in fail_dict.iterkeys()], key=lambda y: y.get('date_time'), reverse=True)
totals = {}
for fail_date in set([fail.get('date') for fail in fail_list]):
daytotals = {}
for et in ProviderFailTypes.names.itervalues():
daytotals.update({et: sum([x.get(et).get('count') for x in fail_list if fail_date == x.get('date')])})
totals.update({fail_date: daytotals})
for (fail_date, total) in totals.iteritems():
for i, item in enumerate(fail_list):
if fail_date == item.get('date'):
if item.get('multirow'):
fail_list[i:i] = [item.copy()]
for et in ProviderFailTypes.names.itervalues():
fail_list[i][et] = {'count': total[et]}
if et == ProviderFailTypes.names[ProviderFailTypes.http]:
fail_list[i][et]['code'] = {}
break
return fail_list
def add_fail(self, fail):
if isinstance(fail, ProviderFail):
with self.lock:
self.dirty = True
self._errors.append(error)
logger.log('Adding error: %s for %s' %
(ProviderErrorTypes.names.get(error.error_type, 'unknown'), self.provider_name()),
self._fails.append(fail)
logger.log('Adding fail.%s for %s' % (ProviderFailTypes.names.get(
fail.fail_type, ProviderFailTypes.names[ProviderFailTypes.other]), self.provider_name()),
logger.DEBUG)
self.save_list()
@ -124,29 +157,29 @@ class ProviderErrorList(object):
if self.dirty:
self.clear_old()
with self.lock:
myDB = db.DBConnection('cache.db')
my_db = db.DBConnection('cache.db')
cl = []
for e in self._errors:
cl.append(['INSERT OR IGNORE INTO providererrors (prov_name, error_type, error_code, error_time) '
'VALUES (?,?,?,?)', [self.provider_name(), e.error_type, e.code,
sbdatetime.totimestamp(e.error_time)]])
for f in self._fails:
cl.append(['INSERT OR IGNORE INTO provider_fails (prov_name, fail_type, fail_code, fail_time) '
'VALUES (?,?,?,?)', [self.provider_name(), f.fail_type, f.code,
sbdatetime.totimestamp(f.fail_time)]])
self.dirty = False
if cl:
myDB.mass_action(cl)
my_db.mass_action(cl)
self.last_save = datetime.datetime.now()
def load_list(self):
with self.lock:
try:
myDB = db.DBConnection('cache.db')
if myDB.hasTable('providererrors'):
results = myDB.select('SELECT * FROM providererrors WHERE prov_name = ?', [self.provider_name()])
self._errors = []
my_db = db.DBConnection('cache.db')
if my_db.hasTable('provider_fails'):
results = my_db.select('SELECT * FROM provider_fails WHERE prov_name = ?', [self.provider_name()])
self._fails = []
for r in results:
try:
self._errors.append(ProviderError(
error_type=helpers.tryInt(r['error_type']), code=helpers.tryInt(r['error_code']),
error_time=datetime.datetime.fromtimestamp(helpers.tryInt(r['error_time']))))
self._fails.append(ProviderFail(
fail_type=helpers.tryInt(r['fail_type']), code=helpers.tryInt(r['fail_code']),
fail_time=datetime.datetime.fromtimestamp(helpers.tryInt(r['fail_time']))))
except (StandardError, Exception):
continue
except (StandardError, Exception):
@ -155,10 +188,10 @@ class ProviderErrorList(object):
def clear_old(self):
with self.lock:
try:
myDB = db.DBConnection('cache.db')
if myDB.hasTable('providererrors'):
my_db = db.DBConnection('cache.db')
if my_db.hasTable('provider_fails'):
time_limit = sbdatetime.totimestamp(datetime.datetime.now() - datetime.timedelta(days=28))
myDB.action('DELETE FROM providererrors WHERE error_time < ?', [time_limit])
my_db.action('DELETE FROM provider_fails WHERE fail_time < ?', [time_limit])
except (StandardError, Exception):
pass
@ -200,49 +233,50 @@ class GenericProvider(object):
self._failure_count = 0
self._failure_time = None
self.errors = ProviderErrorList(self.get_id)
self._hit_limit_count = 0
self._hit_limit_time = None
self._hit_limit_wait = None
self._last_error_type = None
self.fails = ProviderFailList(self.get_id)
self._tmr_limit_count = 0
self._tmr_limit_time = None
self._tmr_limit_wait = None
self._last_fail_type = None
self.has_limit = False
self.fail_times = {1: (0, 15), 2: (0, 30), 3: (1, 0), 4: (2, 0), 5: (3, 0), 6: (6, 0), 7: (12, 0), 8: (24, 0)}
self._load_error_values()
self._load_fail_values()
def _load_error_values(self):
def _load_fail_values(self):
if hasattr(sickbeard, 'DATA_DIR'):
myDB = db.DBConnection('cache.db')
if myDB.hasTable('providererrorcount'):
r = myDB.select('SELECT * FROM providererrorcount WHERE prov_name = ?', [self.get_id()])
my_db = db.DBConnection('cache.db')
if my_db.hasTable('provider_fails_count'):
r = my_db.select('SELECT * FROM provider_fails_count WHERE prov_name = ?', [self.get_id()])
if r:
self._failure_count = helpers.tryInt(r[0]['failure_count'], 0)
if r[0]['failure_time']:
self._failure_time = datetime.datetime.fromtimestamp(r[0]['failure_time'])
else:
self._failure_time = None
self._hit_limit_count = helpers.tryInt(r[0]['hit_limit_count'], 0)
if r[0]['hit_limit_time']:
self._hit_limit_time = datetime.datetime.fromtimestamp(r[0]['hit_limit_time'])
self._tmr_limit_count = helpers.tryInt(r[0]['tmr_limit_count'], 0)
if r[0]['tmr_limit_time']:
self._tmr_limit_time = datetime.datetime.fromtimestamp(r[0]['tmr_limit_time'])
else:
self._hit_limit_time = None
if r[0]['hit_limit_wait']:
self._hit_limit_wait = datetime.timedelta(seconds=helpers.tryInt(r[0]['hit_limit_wait'], 0))
self._tmr_limit_time = None
if r[0]['tmr_limit_wait']:
self._tmr_limit_wait = datetime.timedelta(seconds=helpers.tryInt(r[0]['tmr_limit_wait'], 0))
else:
self._hit_limit_wait = None
self._last_error_type = self.last_error
self._tmr_limit_wait = None
self._last_fail_type = self.last_fail
def _save_error_value(self, field, value):
myDB = db.DBConnection('cache.db')
if myDB.hasTable('providererrorcount'):
r = myDB.action('UPDATE providererrorcount SET %s = ? WHERE prov_name = ?' % field, [value, self.get_id()])
def _save_fail_value(self, field, value):
my_db = db.DBConnection('cache.db')
if my_db.hasTable('provider_fails_count'):
r = my_db.action('UPDATE provider_fails_count SET %s = ? WHERE prov_name = ?' % field,
[value, self.get_id()])
if 0 == r.rowcount:
myDB.action('REPLACE INTO providererrorcount (prov_name, %s) VALUES (?,?)' % field,
[self.get_id(), value])
my_db.action('REPLACE INTO provider_fails_count (prov_name, %s) VALUES (?,?)' % field,
[self.get_id(), value])
@property
def last_error(self):
def last_fail(self):
try:
return sorted(self.errors.errors, key=lambda x: x.error_time, reverse=True)[0].error_type
return sorted(self.fails.fails, key=lambda x: x.fail_time, reverse=True)[0].fail_type
except (StandardError, Exception):
return None
@ -255,7 +289,7 @@ class GenericProvider(object):
changed_val = self._failure_count != value
self._failure_count = value
if changed_val:
self._save_error_value('failure_count', value)
self._save_fail_value('failure_count', value)
@property
def failure_time(self):
@ -266,158 +300,252 @@ class GenericProvider(object):
if None is value or isinstance(value, datetime.datetime):
changed_val = self._failure_time != value
self._failure_time = value
if None is value:
v = value
else:
v = sbdatetime.totimestamp(value)
if changed_val:
self._save_error_value('failure_time', v)
self._save_fail_value('failure_time', (sbdatetime.totimestamp(value), value)[None is value])
@property
def hit_limit_count(self):
return self._hit_limit_count
def tmr_limit_count(self):
return self._tmr_limit_count
@hit_limit_count.setter
def hit_limit_count(self, value):
changed_val = self._hit_limit_count != value
self._hit_limit_count = value
@tmr_limit_count.setter
def tmr_limit_count(self, value):
changed_val = self._tmr_limit_count != value
self._tmr_limit_count = value
if changed_val:
self._save_error_value('hit_limit_count', value)
self._save_fail_value('tmr_limit_count', value)
@property
def hit_limit_time(self):
return self._hit_limit_time
def tmr_limit_time(self):
return self._tmr_limit_time
@hit_limit_time.setter
def hit_limit_time(self, value):
@tmr_limit_time.setter
def tmr_limit_time(self, value):
if None is value or isinstance(value, datetime.datetime):
changed_val = self._hit_limit_time != value
self._hit_limit_time = value
if None is value:
v = value
else:
v = sbdatetime.totimestamp(value)
changed_val = self._tmr_limit_time != value
self._tmr_limit_time = value
if changed_val:
self._save_error_value('hit_limit_time', v)
self._save_fail_value('tmr_limit_time', (sbdatetime.totimestamp(value), value)[None is value])
@property
def max_index(self):
return len(self.fail_times)
@property
def hit_limit_wait(self):
return self._hit_limit_wait
def tmr_limit_wait(self):
return self._tmr_limit_wait
@hit_limit_wait.setter
def hit_limit_wait(self, value):
if isinstance(getattr(self, 'errors', None), ProviderErrorList) and isinstance(value, datetime.timedelta):
self.errors.add_error(ProviderError(error_type=ProviderErrorTypes.limit))
changed_val = self._hit_limit_wait != value
self._hit_limit_wait = value
@tmr_limit_wait.setter
def tmr_limit_wait(self, value):
if isinstance(getattr(self, 'fails', None), ProviderFailList) and isinstance(value, datetime.timedelta):
self.fails.add_fail(ProviderFail(fail_type=ProviderFailTypes.limit))
changed_val = self._tmr_limit_wait != value
self._tmr_limit_wait = value
if changed_val:
if None is value:
self._save_error_value('hit_limit_wait', value)
self._save_fail_value('tmr_limit_wait', value)
elif isinstance(value, datetime.timedelta):
self._save_error_value('hit_limit_wait', value.total_seconds())
self._save_fail_value('tmr_limit_wait', value.total_seconds())
def fail_time_index(self, base_limit=2):
i = self.failure_count - base_limit
return (i, self.max_index)[i >= self.max_index]
def wait_time(self, fc):
return datetime.timedelta(hours=self.fail_times[fc][0], minutes=self.fail_times[fc][1])
def tmr_limit_update(self, period, unit, desc):
self.tmr_limit_time = datetime.datetime.now()
self.tmr_limit_count += 1
limit_set = False
if None not in (period, unit):
limit_set = True
if unit in ('s', 'sec', 'secs', 'seconds', 'second'):
self.tmr_limit_wait = datetime.timedelta(seconds=helpers.tryInt(period))
elif unit in ('m', 'min', 'mins', 'minutes', 'minute'):
self.tmr_limit_wait = datetime.timedelta(minutes=helpers.tryInt(period))
elif unit in ('h', 'hr', 'hrs', 'hours', 'hour'):
self.tmr_limit_wait = datetime.timedelta(hours=helpers.tryInt(period))
elif unit in ('d', 'days', 'day'):
self.tmr_limit_wait = datetime.timedelta(days=helpers.tryInt(period))
else:
limit_set = False
if not limit_set:
time_index = self.fail_time_index(base_limit=0)
self.tmr_limit_wait = self.wait_time(time_index)
logger.log('Request limit reached. Waiting for %s until next retry. Message: %s' %
(self.tmr_limit_wait, desc or 'none found'), logger.WARNING)
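# --- Illustrative sketch (not part of the commit) ---
# Shows how a provider response handler might feed a parsed 'Retry in <n> <unit>' server
# message into tmr_limit_update(); the helper name and sample message are assumptions.
import re

def example_handle_limit_message(provider, description):
    # description could look like: 'Request limit reached. Retry in 300 seconds'
    match = re.search(r'Retry in (\d+)\W+([a-z]+)', description, flags=re.I)
    period, unit = match.groups() if match else (None, None)
    # with period/unit of None, tmr_limit_update() falls back to the failure back-off table
    provider.tmr_limit_update(period, unit, description)
# --- end sketch ---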
def wait_time(self, time_index=None):
"""
Return a suitable wait time, selected by parameter, or based on the current failure count
:param time_index: A key index into the fail_times dict; if None, the index is derived from the current failure count
:type time_index: Integer
:return: Time
:rtype: Timedelta
"""
if None is time_index:
time_index = self.fail_time_index()
return datetime.timedelta(hours=self.fail_times[time_index][0], minutes=self.fail_times[time_index][1])
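# --- Illustrative sketch (not part of the commit) ---
# A simplified stand-in for fail_time_index() + wait_time(): fail_times maps an index to an
# (hours, minutes) back-off and the index grows with the failure count. The table and helper
# below are assumptions for illustration; the real mapping is defined elsewhere in the class.
import datetime

EXAMPLE_FAIL_TIMES = {0: (0, 15), 1: (0, 30), 2: (1, 0), 3: (2, 0), 4: (6, 0)}

def example_wait(failure_count, base_limit=2):
    index = max(0, min(failure_count - base_limit, len(EXAMPLE_FAIL_TIMES) - 1))
    hours, minutes = EXAMPLE_FAIL_TIMES[index]
    return datetime.timedelta(hours=hours, minutes=minutes)

# example_wait(3) -> 0:30:00 and example_wait(10) -> 6:00:00 under the assumed table
# --- end sketch ---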
def fail_newest_delta(self):
"""
Return how long since most recent failure
:return: Period since most recent failure on record
:rtype: timedelta
"""
return datetime.datetime.now() - self.failure_time
def is_waiting(self):
return self.fail_newest_delta() < self.wait_time()
def valid_tmr_time(self):
return isinstance(self.tmr_limit_wait, datetime.timedelta) and \
isinstance(self.tmr_limit_time, datetime.datetime)
@property
def get_next_try_time(self):
n = None
h = datetime.timedelta(seconds=0)
f = datetime.timedelta(seconds=0)
if isinstance(self.hit_limit_wait, datetime.timedelta) and isinstance(self.hit_limit_time, datetime.datetime):
h = self.hit_limit_time + self.hit_limit_wait - datetime.datetime.now()
if 3 <= self.failure_count and isinstance(self.failure_time, datetime.datetime):
fc = self.fail_time_index()
if datetime.datetime.now() - self.failure_time < self.wait_time(fc):
h = self.failure_time + self.wait_time(fc) - datetime.datetime.now()
if self.valid_tmr_time():
h = self.tmr_limit_time + self.tmr_limit_wait - datetime.datetime.now()
if 3 <= self.failure_count and isinstance(self.failure_time, datetime.datetime) and self.is_waiting():
h = self.failure_time + self.wait_time() - datetime.datetime.now()
if datetime.timedelta(seconds=0) < max((h, f)):
n = max((h, f))
return n
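# --- Illustrative sketch (not part of the commit) ---
# get_next_try_time yields the remaining wait as a timedelta (or None); a UI layer can turn it
# into the actual wall-clock time the limit expires. Function name and format are assumptions.
import datetime

def example_next_try_display(provider):
    delay = provider.get_next_try_time  # property, not a method call
    if delay:
        return (datetime.datetime.now() + delay).strftime('%Y-%m-%d %H:%M')
    return 'now'
# --- end sketch ---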
def retry_next(self):
if isinstance(self.hit_limit_wait, datetime.timedelta) and isinstance(self.hit_limit_time, datetime.datetime):
self.hit_limit_time = datetime.datetime.now() - self.hit_limit_wait
if 3 <= self.failure_count and isinstance(self.failure_time, datetime.datetime):
fc = self.fail_time_index()
if datetime.datetime.now() - self.failure_time < self.wait_time(fc):
self.failure_time = datetime.datetime.now() - self.wait_time(fc)
if self.valid_tmr_time():
self.tmr_limit_time = datetime.datetime.now() - self.tmr_limit_wait
if 3 <= self.failure_count and isinstance(self.failure_time, datetime.datetime) and self.is_waiting():
self.failure_time = datetime.datetime.now() - self.wait_time()
def should_skip(self, log_warning=True):
if isinstance(self.hit_limit_wait, datetime.timedelta) and isinstance(self.hit_limit_time, datetime.datetime):
time_left = self.hit_limit_time + self.hit_limit_wait - datetime.datetime.now()
@staticmethod
def fmt_delta(delta):
return str(delta).rsplit('.')[0]
def should_skip(self, log_warning=True, use_tmr_limit=True):
"""
Determine whether a subsequent server request should be skipped. The decision is based on the most recent
server connection activity, including exhausted request limits and the count of connection failures, which
set a "cool down" period that must pass before reconnection attempts are recommended (by returning False).
:param log_warning: Output a warning to the log if True (default), otherwise set False for no output.
:type log_warning: Boolean
:param use_tmr_limit: Set False to ignore a reached tmr limit so that it alone does not cause True to be returned.
:type use_tmr_limit: Boolean
:return: True for any known issue that would prevent a subsequent server connection, otherwise False.
:rtype: Boolean
"""
if self.valid_tmr_time():
time_left = self.tmr_limit_time + self.tmr_limit_wait - datetime.datetime.now()
if time_left > datetime.timedelta(seconds=0):
if log_warning:
logger.log('Hit limit reached, waiting for %s' % time_left, logger.WARNING)
return True
# Ensure the provider name is output (e.g. when displaying config/provs) instead of a thread name such as "Tornado"
prepend = ('[%s] :: ' % self.name, '')[any([x.name in threading.currentThread().getName()
for x in sickbeard.providers.sortedProviderList()])]
logger.log('%sToo many requests reached at %s, waiting for %s' % (
prepend, self.fmt_delta(self.tmr_limit_time), self.fmt_delta(time_left)), logger.WARNING)
return use_tmr_limit
else:
self.hit_limit_time = None
self.hit_limit_wait = None
self.tmr_limit_time = None
self.tmr_limit_wait = None
if 3 <= self.failure_count:
if None is self.failure_time:
self.failure_time = datetime.datetime.now()
fc = self.fail_time_index()
if datetime.datetime.now() - self.failure_time < self.wait_time(fc):
if self.is_waiting():
if log_warning:
time_left = self.wait_time(fc) - (datetime.datetime.now() - self.failure_time)
logger.log('Failed %s times, skipping provider for %s' % (self.failure_count, time_left),
logger.WARNING)
time_left = self.wait_time() - self.fail_newest_delta()
logger.log('Failed %s times, skipping provider for %s, last failure at %s with fail type: %s' % (
self.failure_count, self.fmt_delta(time_left), self.fmt_delta(self.failure_time),
ProviderFailTypes.names.get(
self.last_fail, ProviderFailTypes.names[ProviderFailTypes.other])), logger.WARNING)
return True
return False
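# --- Illustrative sketch (not part of the commit) ---
# use_tmr_limit=False honours the general failure back-off but ignores an active
# Too Many Requests limit, e.g. for a one-off fetch that is wanted regardless.
# 'provider' and 'url' are placeholders for a concrete provider instance and address.
def example_fetch_ignoring_tmr(provider, url):
    if provider.should_skip(log_warning=False, use_tmr_limit=False):
        return None
    return provider.get_url(url, use_tmr_limit=False)
# --- end sketch ---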
def inc_failure_count(self, *args, **kwargs):
error_type = ('error_type' in kwargs and kwargs['error_type'].error_type) or \
(isinstance(args, tuple) and isinstance(args[0], ProviderError) and args[0].error_type)
fail_type = ('fail_type' in kwargs and kwargs['fail_type'].fail_type) or \
(isinstance(args, tuple) and isinstance(args[0], ProviderFail) and args[0].fail_type)
if not isinstance(self.failure_time, datetime.datetime) or \
error_type != self._last_error_type or \
datetime.datetime.now() - self.failure_time > datetime.timedelta(seconds=3):
fail_type != self._last_fail_type or \
self.fail_newest_delta() > datetime.timedelta(seconds=3):
self.failure_count += 1
self.failure_time = datetime.datetime.now()
self._last_error_type = error_type
self.errors.add_error(*args, **kwargs)
self._last_fail_type = fail_type
self.fails.add_fail(*args, **kwargs)
else:
logger.log('%s: Not logging same error within 3 seconds' % self.name, logger.DEBUG)
logger.log('%s: Not logging same failure within 3 seconds' % self.name, logger.DEBUG)
def getURL(self, *args, **kwargs):
def get_url(self, url, skip_auth=False, use_tmr_limit=True, *args, **kwargs):
"""
Return data from a URI, optionally checking authentication before the fetch.
Raised errors and empty responses are tracked to inform future logic decisions.
:param url: Address where to fetch data from
:type url: String
:param skip_auth: Skip authentication check of provider if True
:type skip_auth: Boolean
:param use_tmr_limit: An API limit can be active before a fetch but unwanted for it; set False to short-circuit should_skip
:type use_tmr_limit: Boolean
:param args: params to pass-through to getURL
:type args:
:param kwargs: keyword params to pass-through to getURL
:type kwargs:
:return: None or data fetched from URL
:rtype: String or NoneType
"""
data = None
# check for auth
if not self._authorised() or self.should_skip():
return data
if (not skip_auth and not (self.is_public_access()
and type(self).__name__ not in ['TorrentRssProvider']) and not self._authorised()) \
or self.should_skip(use_tmr_limit=use_tmr_limit):
return
kwargs['raise_exceptions'] = True
kwargs['raise_status_code'] = True
for k, v in dict(headers=self.headers, hooks=dict(response=self.cb_response), session=self.session).items():
kwargs.setdefault(k, v)
post_data = kwargs.get('post_data')
post_json = kwargs.get('post_json')
# noinspection PyUnusedLocal
log_failure_url = False
try:
data = helpers.getURL(*args, **kwargs)
data = helpers.getURL(url, *args, **kwargs)
if data:
if 0 != self.failure_count:
logger.log('Unblocking provider: %s' % self.get_id(), logger.DEBUG)
self.failure_count = 0
self.failure_time = None
else:
self.inc_failure_count(ProviderError(error_type=ProviderErrorTypes.nodata))
self.inc_failure_count(ProviderFail(fail_type=ProviderFailTypes.nodata))
log_failure_url = True
except requests.exceptions.HTTPError as e:
self.inc_failure_count(ProviderError(error_type=ProviderErrorTypes.http, code=e.response.status_code))
except requests.exceptions.ConnectionError as e:
self.inc_failure_count(ProviderError(error_type=ProviderErrorTypes.connection))
except requests.exceptions.ReadTimeout as e:
self.inc_failure_count(ProviderError(error_type=ProviderErrorTypes.timeout))
except (requests.exceptions.Timeout, socket.timeout) as e:
self.inc_failure_count(ProviderError(error_type=ProviderErrorTypes.connection_timeout))
self.inc_failure_count(ProviderFail(fail_type=ProviderFailTypes.http, code=e.response.status_code))
except requests.exceptions.ConnectionError:
self.inc_failure_count(ProviderFail(fail_type=ProviderFailTypes.connection))
except requests.exceptions.ReadTimeout:
self.inc_failure_count(ProviderFail(fail_type=ProviderFailTypes.timeout))
except (requests.exceptions.Timeout, socket.timeout):
self.inc_failure_count(ProviderFail(fail_type=ProviderFailTypes.connection_timeout))
except (StandardError, Exception) as e:
self.inc_failure_count(ProviderError(error_type=ProviderErrorTypes.unknown))
log_failure_url = True
self.inc_failure_count(ProviderFail(fail_type=ProviderFailTypes.other))
self.errors.save_list()
self.fails.save_list()
if log_failure_url:
self.log_failure_url(url, post_data, post_json)
return data
def log_failure_url(self, url, post_data=None, post_json=None):
if self.should_skip(log_warning=False):
post = []
if post_data:
post += [' .. Post params: [%s]' % '&'.join([post_data])]
if post_json:
post += [' .. Json params: [%s]' % '&'.join([post_json])]
logger.log('Failure URL: %s%s' % (url, ''.join(post)), logger.WARNING)
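# --- Illustrative sketch (not part of the commit) ---
# Call shape used after a failed fetch; output only appears once the provider is in a
# skip state. The URL and post params below are made-up placeholder values.
def example_report_failure(provider):
    provider.log_failure_url('https://example.invalid/api?t=tvsearch&q=show', post_data='q=show')
# --- end sketch ---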
def get_id(self):
return GenericProvider.make_id(self.name)
@ -484,19 +612,6 @@ class GenericProvider(object):
self.session.response = dict(url=r.url, status_code=r.status_code, elapsed=r.elapsed, from_cache=r.from_cache)
return r
def get_url(self, url, post_data=None, params=None, timeout=30, json=False):
"""
By default this is just a simple urlopen call but this method should be overridden
for providers with special URL requirements (like cookies)
"""
# check for auth
if not self._authorised():
return
return helpers.getURL(url, post_data=post_data, params=params, headers=self.headers, timeout=timeout,
session=self.session, json=json, hooks=dict(response=self.cb_response))
def download_result(self, result):
"""
Save the result to disk.
@ -1341,8 +1456,9 @@ class TorrentProvider(GenericProvider):
return None
if 10 < len(cur_url) and ((expire and (expire > int(time.time()))) or
self._has_signature(helpers.getURL(cur_url, session=self.session))):
self._has_signature(self.get_url(cur_url, skip_auth=True))):
if self.should_skip():
return None
for k, v in getattr(self, 'url_tmpl', {}).items():
self.urls[k] = v % {'home': cur_url, 'vars': getattr(self, 'url_vars', {}).get(k, '')}
@ -1402,15 +1518,17 @@ class TorrentProvider(GenericProvider):
if isinstance(url, type([])):
for i in range(0, len(url)):
helpers.getURL(url.pop(), session=self.session)
self.get_url(url.pop(), skip_auth=True)
if self.should_skip():
return False
passfield, userfield = None, None
if not url:
if hasattr(self, 'urls'):
url = self.urls.get('login_action')
if url:
response = helpers.getURL(url, session=self.session)
if None is response:
response = self.get_url(url, skip_auth=True)
if self.should_skip() or None is response:
return False
try:
post_params = isinstance(post_params, type({})) and post_params or {}
@ -1450,8 +1568,8 @@ class TorrentProvider(GenericProvider):
if self.password not in post_params.values():
post_params[(passfield, 'password')[not passfield]] = self.password
response = helpers.getURL(url, post_data=post_params, session=self.session, timeout=timeout)
if response:
response = self.get_url(url, skip_auth=True, post_data=post_params, timeout=timeout)
if not self.should_skip() and response:
if logged_in(response):
return True

View file
@ -66,6 +66,8 @@ class GFTrackerProvider(generic.TorrentProvider):
(self.urls['search'] % search_string, '')['Cache' == mode])
html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(items[mode])
try:

View file
@ -70,6 +70,8 @@ class GrabTheInfoProvider(generic.TorrentProvider):
(self.urls['search'] % search_string, '')['Cache' == mode])
html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(items[mode])
try:

View file
@ -105,6 +105,8 @@ class HD4FreeProvider(generic.TorrentProvider):
self.token, '+'.join(search_string.split()), self._categories_string(mode, ''), '', '', '')
resp = self.get_url(search_url, json=True)
if self.should_skip():
return results
cnt = len(items[mode])
try:

View file
@ -48,7 +48,7 @@ class HDBitsProvider(generic.TorrentProvider):
self.username, self.passkey, self.freeleech, self.minseed, self.minleech = 5 * [None]
def check_auth_from_data(self, parsed_json):
def _check_auth_from_data(self, parsed_json):
if 'status' in parsed_json and 5 == parsed_json.get('status') and 'message' in parsed_json:
logger.log(u'Incorrect username or password for %s: %s' % (self.name, parsed_json['message']), logger.DEBUG)
@ -112,9 +112,11 @@ class HDBitsProvider(generic.TorrentProvider):
search_url = self.urls['search']
json_resp = self.get_url(search_url, post_data=post_data, json=True)
if self.should_skip():
return results
try:
if not (json_resp and self.check_auth_from_data(json_resp) and 'data' in json_resp):
if not (json_resp and self._check_auth_from_data(json_resp) and 'data' in json_resp):
logger.log(u'Response from %s does not contain any json data, abort' % self.name, logger.ERROR)
return results
except AuthException as e:

View file
@ -83,6 +83,8 @@ class HDSpaceProvider(generic.TorrentProvider):
search_url += self.urls['search'] % rc['nodots'].sub(' ', search_string)
html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(items[mode])
try:

View file
@ -86,6 +86,8 @@ class HDTorrentsProvider(generic.TorrentProvider):
self._categories_string(mode, template='category[]=%s')
.replace('&category[]=Animation', ('&genre[]=Animation', '')[mode in ['Cache', 'Propers']]))
html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(items[mode])
try:

View file
@ -88,6 +88,8 @@ class IPTorrentsProvider(generic.TorrentProvider):
(';free', '')[not self.freeleech], (';o=seeders', '')['Cache' == mode])
html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(items[mode])
try:

View file
@ -67,6 +67,8 @@ class LimeTorrentsProvider(generic.TorrentProvider):
else self.urls['search'] % (urllib.quote_plus(search_string))
html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(items[mode])
try:

View file
@ -54,6 +54,8 @@ class MagnetDLProvider(generic.TorrentProvider):
search_url = self.urls['search'] % re.sub('[.\s]+', ' ', search_string)
html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(items[mode])
try:

View file
@ -65,6 +65,9 @@ class MoreThanProvider(generic.TorrentProvider):
# fetches 15 results by default, and up to 100 if allowed in user profile
html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(items[mode])
try:
if not html or self._has_no_results(html):

View file
@ -68,6 +68,8 @@ class NcoreProvider(generic.TorrentProvider):
# fetches 15 results by default, and up to 100 if allowed in user profile
html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(items[mode])
try:

View file
@ -51,7 +51,9 @@ class NebulanceProvider(generic.TorrentProvider):
post_params={'keeplogged': '1', 'form_tmpl': True}):
return False
if not self.user_authkey:
response = helpers.getURL(self.urls['user'], session=self.session, json=True)
response = self.get_url(self.urls['user'], skip_auth=True, json=True)
if self.should_skip():
return False
if 'response' in response:
self.user_authkey, self.user_passkey = [response['response'].get(v) for v in 'authkey', 'passkey']
return self.user_authkey
@ -74,6 +76,8 @@ class NebulanceProvider(generic.TorrentProvider):
search_url += self.urls['search'] % rc['nodots'].sub('+', search_string)
data_json = self.get_url(search_url, json=True)
if self.should_skip():
return results
cnt = len(items[mode])
try:

View file
@ -187,13 +187,13 @@ class NewznabProvider(generic.NZBProvider):
if datetime.date.today() - self._caps_need_apikey['date'] > datetime.timedelta(days=30) or \
not self._caps_need_apikey['need']:
self._caps_need_apikey['need'] = False
data = self.getURL('%s/api?t=caps' % self.url)
data = self.get_url('%s/api?t=caps' % self.url)
if data:
xml_caps = helpers.parse_xml(data)
if xml_caps is None or not hasattr(xml_caps, 'tag') or xml_caps.tag == 'error' or xml_caps.tag != 'caps':
api_key = self.maybe_apikey()
if isinstance(api_key, basestring) and api_key not in ('0', ''):
data = self.getURL('%s/api?t=caps&apikey=%s' % (self.url, api_key))
data = self.get_url('%s/api?t=caps&apikey=%s' % (self.url, api_key))
if data:
xml_caps = helpers.parse_xml(data)
if xml_caps and hasattr(xml_caps, 'tag') and xml_caps.tag == 'caps':
@ -296,7 +296,7 @@ class NewznabProvider(generic.NZBProvider):
return False
return super(NewznabProvider, self)._check_auth(is_required)
def check_auth_from_data(self, data):
def _check_auth_from_data(self, data, url):
if data is None or not hasattr(data, 'tag'):
return False
@ -312,23 +312,12 @@ class NewznabProvider(generic.NZBProvider):
elif '102' == code:
raise AuthException('Your account isn\'t allowed to use the API on %s, contact the admin.' % self.name)
elif '500' == code:
self.hit_limit_time = datetime.datetime.now()
self.hit_limit_count += 1
retry_time = re.search(r'Retry in (\d+)\W+([a-z]+)', description, flags=re.I)
if retry_time:
if retry_time.group(2) in ('s', 'sec', 'secs', 'seconds', 'second'):
self.hit_limit_wait = datetime.timedelta(seconds=helpers.tryInt(retry_time.group(1)))
elif retry_time.group(2) in ('m', 'min', 'mins', 'minutes', 'minute'):
self.hit_limit_wait = datetime.timedelta(minutes=helpers.tryInt(retry_time.group(1)))
elif retry_time.group(2) in ('h', 'hr', 'hrs', 'hours', 'hour'):
self.hit_limit_wait = datetime.timedelta(hours=helpers.tryInt(retry_time.group(1)))
elif retry_time.group(2) in ('d', 'days', 'day'):
self.hit_limit_wait = datetime.timedelta(days=helpers.tryInt(retry_time.group(1)))
if not self.hit_limit_wait:
fc = self.fail_time_index(base_limit=0)
self.hit_limit_wait = self.wait_time(fc)
logger.log('Request limit reached. Waiting for %s until next retry. Message: %s' %
(self.hit_limit_wait, description), logger.WARNING)
try:
retry_time, unit = re.findall(r'Retry in (\d+)\W+([a-z]+)', description, flags=re.I)[0]
except IndexError:
retry_time, unit = None, None
self.tmr_limit_update(retry_time, unit, description)
self.log_failure_url(url)
elif '910' == code:
logger.log(
'%s %s, please check with provider.' %
@ -339,7 +328,7 @@ class NewznabProvider(generic.NZBProvider):
logger.WARNING)
return False
self.hit_limit_count = 0
self.tmr_limit_count = 0
return True
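# --- Illustrative sketch (not part of the commit) ---
# Rough shape of a newznab 'request limit' reply whose code/description pair drives the
# tmr_limit_update() call in the '500' branch above; the sample XML is an assumption.
import xml.etree.ElementTree as ElementTree

sample = '<error code="500" description="Request limit reached. Retry in 3 hours"/>'
root = ElementTree.fromstring(sample)
code, description = root.get('code'), root.get('description')
# code == '500'; description may carry the optional 'Retry in <n> <unit>' hint
# --- end sketch ---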
def config_str(self):
@ -739,17 +728,13 @@ class NewznabProvider(generic.NZBProvider):
search_url = '%sapi?%s' % (self.url, urllib.urlencode(request_params))
i and time.sleep(2.1)
data = self.getURL(search_url)
data = self.get_url(search_url)
if self.should_skip():
break
if not data:
logger.log('No Data returned from %s' % self.name, logger.WARNING)
if self.should_skip() or not data:
break
# hack this in until it's fixed server side
if data and not data.startswith('<?xml'):
if not data.startswith('<?xml'):
data = '<?xml version="1.0" encoding="ISO-8859-1" ?>%s' % data
try:
@ -759,7 +744,7 @@ class NewznabProvider(generic.NZBProvider):
logger.log('Error trying to load %s RSS feed' % self.name, logger.WARNING)
break
if not self.check_auth_from_data(parsed_xml):
if not self._check_auth_from_data(parsed_xml, search_url):
break
if 'rss' != parsed_xml.tag:

View file
@ -53,6 +53,8 @@ class NyaaProvider(generic.TorrentProvider):
search_url = self.urls['search'] % ((0, 2)[self.confirmed], search_string)
html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(items[mode])
try:

View file
@ -99,10 +99,13 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
def get_data(self, url):
result = None
if url and False is self._init_api():
data = self.getURL(url, timeout=90)
data = self.get_url(url, timeout=90)
if self.should_skip():
return result
if data:
if re.search('(?i)limit.*?reached', data):
logger.log('Daily Nzb Download limit reached', logger.DEBUG)
self.tmr_limit_update('1', 'h', 'Your 24 hour limit of 10 NZBs has been reached')
self.log_failure_url(url)
elif '</nzb>' not in data or 'seem to be logged in' in data:
logger.log('Failed nzb data response: %s' % data, logger.DEBUG)
else:
@ -156,6 +159,8 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
url = self.urls['cache'] % urllib.urlencode(params)
response = self.get_url(url)
if self.should_skip():
return results
data = feedparser.parse(response.replace('<xml', '<?xml').replace('>\n<info>', '?>\n<feed>\n<info>')
.replace('<search_req>\n', '').replace('</search_req>\n', '')
@ -185,7 +190,9 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
search_url = self.urls['search'] % urllib.urlencode(params)
data_json = self.getURL(search_url, json=True)
data_json = self.get_url(search_url, json=True)
if self.should_skip():
return results
if data_json and self._check_auth_from_data(data_json, is_xml=False):
for item in data_json:
if 'release' in item and 'getnzb' in item:
@ -213,7 +220,9 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
'cat': 'cat=(?:%s)' % '|'.join(cats)}.items())
mode = ('search', 'cache')['' == search]
search_url = self.urls[mode + '_html'] % search
html = self.getURL(search_url)
html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(results)
try:
if not html:

View file
@ -59,6 +59,8 @@ class PiSexyProvider(generic.TorrentProvider):
search_url = self.urls['search'] % search_string
html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(items[mode])
try:

View file
@ -94,6 +94,8 @@ class PotUKProvider(generic.TorrentProvider):
params.setdefault(name, value)
del params['doprefs']
html = self.get_url(search_url, post_data=params)
if self.should_skip():
return results
cnt = len(items[mode])
try:
@ -135,6 +137,9 @@ class PotUKProvider(generic.TorrentProvider):
def get_data(self, url):
result = None
html = self.get_url(url, timeout=90)
if self.should_skip():
return result
try:
result = self._link(re.findall('(?i)"(attachment\.php[^"]+?)"', html)[0])
except IndexError:

View file
@ -16,7 +16,6 @@
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
from . import generic
from sickbeard.rssfeeds import RSSFeeds
from lib.unidecode import unidecode
@ -52,7 +51,7 @@ class PreToMeProvider(generic.TorrentProvider):
search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
search_url = url + (self.urls['search'] % search_string, '')['Cache' == mode]
xml_data = RSSFeeds(self).get_feed(search_url)
xml_data = self.cache.get_rss(search_url)
cnt = len(items[mode])
if xml_data and 'entries' in xml_data:

View file
@ -97,6 +97,8 @@ class PrivateHDProvider(generic.TorrentProvider):
'+'.join(search_string.split()), self._categories_string(mode, ''))
html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(items[mode])
try:

View file
@ -85,11 +85,16 @@ class PTFProvider(generic.TorrentProvider):
search_url = self.urls['search'] % ('+'.join(search_string.split()), self._categories_string(mode))
html = self.get_url(search_url)
if self.should_skip():
return results
time.sleep(2)
if not self.has_all_cookies(['session_key']):
if not self._authorised():
return results
html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(items[mode])
try:

View file
@ -58,8 +58,8 @@ class RarbgProvider(generic.TorrentProvider):
return True
for r in range(0, 3):
response = helpers.getURL(self.urls['api_token'], session=self.session, json=True)
if response and 'token' in response:
response = self.get_url(self.urls['api_token'], json=True)
if not self.should_skip() and response and 'token' in response:
self.token = response['token']
self.token_expiry = datetime.datetime.now() + datetime.timedelta(minutes=14)
return True
@ -125,6 +125,8 @@ class RarbgProvider(generic.TorrentProvider):
searched_url = search_url % {'r': int(self.confirmed), 't': self.token}
data_json = self.get_url(searched_url, json=True)
if self.should_skip():
return results
self.token_expiry = datetime.datetime.now() + datetime.timedelta(minutes=14)
self.request_throttle = datetime.datetime.now() + datetime.timedelta(seconds=3)

View file
@ -63,6 +63,8 @@ class RevTTProvider(generic.TorrentProvider):
html = self.get_url(self.urls['search'] % ('+'.join(search_string.split()),
self._categories_string(mode)))
if self.should_skip():
return results
cnt = len(items[mode])
try:

View file
@ -21,7 +21,6 @@ from . import generic
from sickbeard import logger, tvcache
from sickbeard.helpers import tryInt
from sickbeard.exceptions import ex
from sickbeard.rssfeeds import RSSFeeds
from lib.bencode import bdecode
@ -41,8 +40,6 @@ class TorrentRssProvider(generic.TorrentProvider):
self.search_mode = search_mode
self.search_fallback = bool(tryInt(search_fallback))
self.feeder = RSSFeeds(self)
def image_name(self):
return generic.GenericProvider.image_name(self, 'torrentrss')
@ -102,6 +99,9 @@ class TorrentRssProvider(generic.TorrentProvider):
break
else:
torrent_file = self.get_url(url)
if self.should_skip():
break
try:
bdecode(torrent_file)
break
@ -120,7 +120,7 @@ class TorrentRssProvider(generic.TorrentProvider):
result = []
for mode in search_params.keys():
data = self.feeder.get_feed(self.url)
data = self.cache.get_rss(self.url)
result += (data and 'entries' in data) and data.entries or []

View file
@ -61,6 +61,8 @@ class SceneHDProvider(generic.TorrentProvider):
search_url = self.urls['search'] % (search_string, self._categories_string(mode, '%s', ','))
html = self.get_url(search_url, timeout=90)
if self.should_skip():
return results
cnt = len(items[mode])
try:

View file
@ -80,6 +80,8 @@ class SceneTimeProvider(generic.TorrentProvider):
self.session.headers.update({'Referer': self.url + 'browse.php', 'X-Requested-With': 'XMLHttpRequest'})
html = self.get_url(self.urls['browse'], post_data=post_data)
if self.should_skip():
return results
cnt = len(items[mode])
try:

View file
@ -49,8 +49,8 @@ class ShazbatProvider(generic.TorrentProvider):
def _authorised(self, **kwargs):
return super(ShazbatProvider, self)._authorised(
logged_in=(lambda y=None: '<input type="password"' not in helpers.getURL(
self.urls['feeds'], session=self.session)), post_params={'tv_login': self.username, 'form_tmpl': True})
logged_in=(lambda y=None: '<input type="password"' not in self.get_url(self.urls['feeds'], skip_auth=True)),
post_params={'tv_login': self.username, 'form_tmpl': True})
def _search_provider(self, search_params, **kwargs):
@ -70,11 +70,16 @@ class ShazbatProvider(generic.TorrentProvider):
if 'Cache' == mode:
search_url = self.urls['browse']
html = self.get_url(search_url)
if self.should_skip():
return results
else:
search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
search_string = search_string.replace(show_detail, '').strip()
search_url = self.urls['search'] % search_string
html = self.get_url(search_url)
if self.should_skip():
return results
shows = rc['show_id'].findall(html)
if not any(shows):
continue
@ -85,6 +90,8 @@ class ShazbatProvider(generic.TorrentProvider):
continue
html and time.sleep(1.1)
html += self.get_url(self.urls['show'] % sid)
if self.should_skip():
return results
cnt = len(items[mode])
try:

View file
@ -56,6 +56,8 @@ class SkytorrentsProvider(generic.TorrentProvider):
search_url = self.urls['search'] % search_string
html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(items[mode])
try:

View file
@ -67,6 +67,8 @@ class SpeedCDProvider(generic.TorrentProvider):
jxt=2, jxw='b', freeleech=('on', None)[not self.freeleech])
data_json = self.get_url(self.urls['search'], post_data=post_data, json=True)
if self.should_skip():
return results
cnt = len(items[mode])
try:

View file
@ -106,7 +106,7 @@ class ThePirateBayProvider(generic.TorrentProvider):
quality = Quality.UNKNOWN
file_name = None
data = self.get_url('%sajax_details_filelist.php?id=%s' % (self.url, torrent_id))
if not data:
if self.should_skip() or not data:
return None
files_list = re.findall('<td.+>(.*?)</td>', data)
@ -193,6 +193,8 @@ class ThePirateBayProvider(generic.TorrentProvider):
search_url = self.urls['browse'] if 'Cache' == mode \
else self.urls['search'] % (urllib.quote(search_string))
html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(items[mode])
try:

View file
@ -49,6 +49,9 @@ class TokyoToshokanProvider(generic.TorrentProvider):
'stats': 'S:\s*?(\d)+\s*L:\s*(\d+)', 'size': 'size:\s*(\d+[.,]\d+\w+)'}.iteritems())
html = self.get_url(search_url)
if self.should_skip():
return self._sort_seeding(mode, results)
if html:
try:
with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
@ -103,7 +106,7 @@ class TokyoToshokanCache(tvcache.TVCache):
mode = 'Cache'
search_url = '%srss.php?%s' % (self.provider.url, urllib.urlencode({'filter': '1'}))
data = self.getRSSFeed(search_url)
data = self.get_rss(search_url)
results = []
if data and 'entries' in data:

View file
@ -74,6 +74,8 @@ class TorLockProvider(generic.TorrentProvider):
else self.urls['search'] % (urllib.quote_plus(search_string).replace('+', '-'))
html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(items[mode])
try:

View file
@ -61,6 +61,8 @@ class TorrentBytesProvider(generic.TorrentProvider):
search_url = self.urls['search'] % (search_string, self._categories_string(mode))
html = self.get_url(search_url, timeout=90)
if self.should_skip():
return results
cnt = len(items[mode])
try:

View file
@ -86,6 +86,8 @@ class TorrentDayProvider(generic.TorrentProvider):
search_string, ('&sort=7&type=desc', '')['Cache' == mode])
html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(items[mode])
try:

View file
@ -69,6 +69,8 @@ class TorrentingProvider(generic.TorrentProvider):
search_url = self.urls['search'] % (self._categories_string(), search_string)
html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(items[mode])
try:

View file
@ -62,6 +62,8 @@ class TorrentLeechProvider(generic.TorrentProvider):
'query': isinstance(search_string, unicode) and unidecode(search_string) or search_string}
html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(items[mode])
try:

View file
@ -93,6 +93,8 @@ class Torrentz2Provider(generic.TorrentProvider):
'tv%s' % ('+' + quote_plus(search_string), '')['Cache' == mode])
html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(items[mode])
try:

View file
@ -66,6 +66,8 @@ class TVChaosUKProvider(generic.TorrentProvider):
'order': 'desc', 'daysprune': '-1'})
html = self.get_url(self.urls['search'], **kwargs)
if self.should_skip():
return results
cnt = len(items[mode])
try:

View file
@ -70,6 +70,8 @@ class WOPProvider(generic.TorrentProvider):
search_url = self.urls['search'] % (search_string, self._categories_string(mode, 'cats2[]=%s'))
html = self.get_url(search_url, timeout=90)
if self.should_skip():
return results
cnt = len(items[mode])
try:

View file
@ -58,6 +58,8 @@ class ZooqleProvider(generic.TorrentProvider):
search_url = self.urls['search'] % (search_string, self._categories_string(mode, '', ','))
html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(items[mode])
try:

View file
@ -5,54 +5,32 @@
import feedparser
from sickbeard import helpers, logger
from sickbeard import logger
from sickbeard.exceptions import ex
class RSSFeeds:
def __init__(self, provider=None):
self.provider = provider
self.response = None
def _check_auth_cookie(self):
def get_feed(self, url, **kwargs):
if self.provider:
return self.provider.check_auth_cookie()
return True
if self.provider and self.provider.check_auth_cookie():
response = self.provider.get_url(url, **kwargs)
if not self.provider.should_skip() and response:
try:
data = feedparser.parse(response)
data['rq_response'] = self.provider.session.response
if data and 'entries' in data:
return data
# noinspection PyUnusedLocal
def cb_response(self, r, *args, **kwargs):
self.response = dict(url=r.url, elapsed=r.elapsed, from_cache=r.from_cache)
return r
if data and 'error' in data.feed:
err_code = data.feed['error']['code']
err_desc = data.feed['error']['description']
logger.log(u'RSS error:[%s] code:[%s]' % (err_desc, err_code), logger.DEBUG)
else:
logger.log(u'RSS error loading url: ' + url, logger.DEBUG)
def get_feed(self, url, request_headers=None, **kwargs):
if not self._check_auth_cookie():
return
session = None
if self.provider and hasattr(self.provider, 'session'):
session = self.provider.session
response = helpers.getURL(url, headers=request_headers, session=session,
hooks=dict(response=self.cb_response), **kwargs)
if not response:
return
try:
feed = feedparser.parse(response)
feed['rq_response'] = self.response
if feed and 'entries' in feed:
return feed
if feed and 'error' in feed.feed:
err_code = feed.feed['error']['code']
err_desc = feed.feed['error']['description']
logger.log(u'RSS ERROR:[%s] CODE:[%s]' % (err_desc, err_code), logger.DEBUG)
else:
logger.log(u'RSS error loading url: ' + url, logger.DEBUG)
except Exception as e:
logger.log(u'RSS error: ' + ex(e), logger.DEBUG)
except Exception as e:
logger.log(u'RSS error: ' + ex(e), logger.DEBUG)

View file
@ -143,7 +143,7 @@ def snatch_episode(result, end_status=SNATCHED):
# make sure we have the torrent file content
if not result.content and not result.url.startswith('magnet'):
result.content = result.provider.get_url(result.url)
if not result.content:
if result.provider.should_skip() or not result.content:
logger.log(u'Torrent content failed to download from %s' % result.url, logger.ERROR)
return False
# Snatches torrent with client
@ -465,6 +465,8 @@ def search_for_needed_episodes(episodes):
best_result.content = None
if not best_result.url.startswith('magnet'):
best_result.content = best_result.provider.get_url(best_result.url)
if best_result.provider.should_skip():
break
if not best_result.content:
continue

View file
@ -107,7 +107,7 @@ class TVCache:
return []
def getRSSFeed(self, url, **kwargs):
def get_rss(self, url, **kwargs):
return RSSFeeds(self.provider).get_feed(url, **kwargs)
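# --- Illustrative sketch (not part of the commit) ---
# The renamed get_rss() routes feed fetches through the provider's get_url(), so feed
# failures now count against the provider; 'cache' and 'url' are placeholder names.
def example_cache_entries(cache, url):
    data = cache.get_rss(url)
    return (data and 'entries' in data) and data.entries or []
# --- end sketch ---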
def _translateTitle(self, title):

View file
@ -4531,12 +4531,12 @@ class ManageSearches(Manage):
t.recent_search_status = sickbeard.searchQueueScheduler.action.is_recentsearch_in_progress()
t.find_propers_status = sickbeard.searchQueueScheduler.action.is_propersearch_in_progress()
t.queue_length = sickbeard.searchQueueScheduler.action.queue_length()
t.provider_error_stats = [{'name': p.name, 'prov_id': p.get_id(), 'errors': p.errors.errors_sorted,
'hit_limit_time': p.hit_limit_time, 'failure_time': p.failure_time,
'last_error': p.last_error,
'next_try': p.get_next_try_time, 'has_limit': getattr(p, 'has_limit', False)}
for p in sickbeard.providerList + sickbeard.newznabProviderList]
t.provider_errors = 0 < len([p for p in t.provider_error_stats if len(p['errors'])])
t.provider_fail_stats = filter(lambda stat: len(stat['fails']), [{
'active': p.is_active(), 'name': p.name, 'prov_id': p.get_id(), 'prov_img': p.image_name(),
'fails': p.fails.fails_sorted, 'tmr_limit_time': p.tmr_limit_time,
'next_try': p.get_next_try_time, 'has_limit': getattr(p, 'has_limit', False)}
for p in sickbeard.providerList + sickbeard.newznabProviderList])
t.provider_fails = 0 < len([p for p in t.provider_fail_stats if len(p['fails'])])
t.submenu = self.ManageMenu('Search')