Mirror of https://github.com/SickGear/SickGear.git

Merge branch 'release/0.14.0'
Commit f31332185a: 102 changed files with 2277 additions and 634 deletions
CHANGES.md (20 changes)

@@ -1,4 +1,22 @@
### 0.13.15 (2018-01-26 10:30:00 UTC)
### 0.14.0 (2018-02-01 02:30:00 UTC)

* Change improve core scheduler logic
* Change improve media process to parse anime format 'Show Name 123 - 001 - Ep 1 name'
* Add free space stat (if obtainable) of parent folder(s) to footer
* Add option "Display disk free" to general config/interface page (default enabled)
* Add a provider error table to page Manage/Media Search
* Add failure handling, skip provider for x hour(s) depending on count of failures
* Add detection of Too Many Requests (Supporting providers UC and BTN)
* Add footer icon button to switch time layouts
* Add performance gains for proper search by integrating it into recent search
* Add the once per day proper finder time to footer, this process catches any propers missed during recent searches
* Add ability to differentiate webdl/rip sources so overwriting propers is always done from the same source (e.g. AMZN)
* Change layout of quality custom to improve clarity
* Change tweak text of SD DVD to include BD/BR
* Change TBy prov add UHD cat


### 0.13.15 (2018-01-26 10:30:00 UTC)

* Fix save on config general
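The failure-handling item above skips a failing provider for a period that grows with its consecutive failure count. A minimal sketch of that back-off idea in Python (illustrative only, not SickGear's actual implementation; the tier values here are hypothetical):

    import datetime

    # Hypothetical tiers: after N failures, skip the provider for H hours.
    FAIL_TIERS = ((2, 1), (4, 3), (6, 6), (8, 12), (10, 24))

    def next_retry_time(fail_count, last_failure):
        """Earliest time a provider should be retried after repeated failures."""
        hours = 0
        for threshold, backoff_hours in FAIL_TIERS:
            if fail_count >= threshold:
                hours = backoff_hours  # the highest matched tier wins
        return last_failure + datetime.timedelta(hours=hours)

A Too Many Requests response (HTTP 429, detected for UC and BTN per the notes above) would feed the same mechanism as one more counted failure.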
SickBeard.py (51 changes)

@@ -76,6 +76,7 @@ from sickbeard.exceptions import ex
from lib.configobj import ConfigObj

throwaway = datetime.datetime.strptime('20110101', '%Y%m%d')
rollback_loaded = None

signal.signal(signal.SIGINT, sickbeard.sig_handler)
signal.signal(signal.SIGTERM, sickbeard.sig_handler)

@@ -153,6 +154,19 @@ class SickGear(object):

        return '\n'.join(help_msg)

    @staticmethod
    def execute_rollback(mo, max_v):
        global rollback_loaded
        try:
            if None is rollback_loaded:
                rollback_loaded = db.get_rollback_module()
            if None is not rollback_loaded:
                rollback_loaded.__dict__[mo]().run(max_v)
            else:
                print(u'ERROR: Could not download Rollback Module.')
        except (StandardError, Exception):
            pass

    def start(self):
        # do some preliminary stuff
        sickbeard.MY_FULLNAME = os.path.normpath(os.path.abspath(__file__))
@@ -324,14 +338,28 @@ class SickGear(object):
            print('Stack Size %s not set: %s' % (stack_size, e.message))

        # check all db versions
        for d, min_v, max_v, mo in [
            ('failed.db', sickbeard.failed_db.MIN_DB_VERSION, sickbeard.failed_db.MAX_DB_VERSION, 'FailedDb'),
            ('cache.db', sickbeard.cache_db.MIN_DB_VERSION, sickbeard.cache_db.MAX_DB_VERSION, 'CacheDb'),
            ('sickbeard.db', sickbeard.mainDB.MIN_DB_VERSION, sickbeard.mainDB.MAX_DB_VERSION, 'MainDb')
        for d, min_v, max_v, base_v, mo in [
            ('failed.db', sickbeard.failed_db.MIN_DB_VERSION, sickbeard.failed_db.MAX_DB_VERSION, sickbeard.failed_db.TEST_BASE_VERSION, 'FailedDb'),
            ('cache.db', sickbeard.cache_db.MIN_DB_VERSION, sickbeard.cache_db.MAX_DB_VERSION, sickbeard.cache_db.TEST_BASE_VERSION, 'CacheDb'),
            ('sickbeard.db', sickbeard.mainDB.MIN_DB_VERSION, sickbeard.mainDB.MAX_DB_VERSION, sickbeard.mainDB.TEST_BASE_VERSION, 'MainDb')
        ]:
            cur_db_version = db.DBConnection(d).checkDBVersion()

            if cur_db_version > 0:
            # handling of standalone TEST db versions
            if cur_db_version >= 100000 and cur_db_version != max_v:
                print('Your [%s] database version (%s) is a test db version and doesn\'t match SickGear required '
                      'version (%s), downgrading to production db' % (d, cur_db_version, max_v))
                self.execute_rollback(mo, max_v)
                cur_db_version = db.DBConnection(d).checkDBVersion()
                if cur_db_version >= 100000:
                    print(u'Rollback to production failed.')
                    sys.exit(u'If you have used other forks, your database may be unusable due to their changes')
                if 100000 <= max_v and None is not base_v:
                    max_v = base_v  # set max_v to the needed base production db for test_db
                print(u'Rollback to production of [%s] successful.' % d)

            # handling of production db versions
            if 0 < cur_db_version < 100000:
                if cur_db_version < min_v:
                    print(u'Your [%s] database version (%s) is too old to migrate from with this version of SickGear'
                          % (d, cur_db_version))

@@ -341,19 +369,16 @@ class SickGear(object):
                    print(u'Your [%s] database version (%s) has been incremented past'
                          u' what this version of SickGear supports. Trying to rollback now. Please wait...' %
                          (d, cur_db_version))
                    try:
                        rollback_loaded = db.get_rollback_module()
                        if None is not rollback_loaded:
                            rollback_loaded.__dict__[mo]().run(max_v)
                        else:
                            print(u'ERROR: Could not download Rollback Module.')
                    except (StandardError, Exception):
                        pass
                    self.execute_rollback(mo, max_v)
                    if db.DBConnection(d).checkDBVersion() > max_v:
                        print(u'Rollback failed.')
                        sys.exit(u'If you have used other forks, your database may be unusable due to their changes')
                    print(u'Rollback of [%s] successful.' % d)

        # free memory
        global rollback_loaded
        rollback_loaded = None

        # Initialize the config and our threads
        sickbeard.initialize(console_logging=self.console_logging)
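Distilled from the two hunks above, the startup gate for each database applies roughly this decision order (a sketch only, under the convention visible in the code that versions at or above 100000 denote standalone test schemas):

    def classify_db_version(cur_v, min_v, max_v):
        # Sketch of the checks in SickGear.start() above, in the order applied.
        if not cur_v:
            return 'new database'
        if cur_v >= 100000 and cur_v != max_v:
            return 'test schema from another build: roll back to production'
        if cur_v < min_v:
            return 'too old to migrate with this version'
        if cur_v > max_v:
            return 'incremented past supported version: roll back'
        return 'ok'

After a successful rollback from a test schema, max_v is lowered to the new TEST_BASE_VERSION value so the later checks compare against the production base that the test db derives from.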
@@ -336,6 +336,7 @@ home_newShow.tmpl
color:#707070
}

.btn-inverse.dark-bg,
#addRootDirTable td label .filepath,
.grey-text{color:#999}
.highlight-text{color:#fff}

@@ -762,6 +763,60 @@ a.whitelink{

}

/* TABLE BACKGROUND color */
.provider-failures.hover-highlight td:before,
.provider-failures.focus-highlight td:before{
background:#222
}

/* ODD ZEBRA STRIPE color (needs zebra widget) */
.provider-failures.hover-highlight .odd td:before,
.provider-failures.hover-highlight .odd th:before,
.provider-failures.focus-highlight .odd td:before,
.provider-failures.focus-highlight .odd th:before{
background:#333
}
/* EVEN ZEBRA STRIPE color (needs zebra widget) */
.provider-failures.hover-highlight .even td:before,
.provider-failures.hover-highlight .even th:before,
.provider-failures.focus-highlight .even td:before,
.provider-failures.focus-highlight .even th:before{
background-color:#2e2e2e
}

/* HOVER ROW highlight colors */
.provider-failures.hover-highlight tbody > tr:hover > td, /* override tablesorter theme row hover */
.provider-failures.hover-highlight tbody > tr.odd:hover > td,
.provider-failures.hover-highlight tbody > tr.even:hover > td{
background-color:#282828
}
/* HOVER COLUMN highlight colors */
.provider-failures.hover-highlight tbody tr th:hover::after,
.provider-failures.hover-highlight tbody tr td:hover::after{
background-color:#282828
}

/* FOCUS ROW highlight color (touch devices) */
.provider-failures.focus-highlight td:focus::before,
.provider-failures.focus-highlight th:focus::before{
background-color:#181818
}
/* FOCUS COLUMN highlight color (touch devices) */
.provider-failures.focus-highlight td:focus::after,
.provider-failures.focus-highlight th:focus::after{
background-color:#181818
}
/* FOCUS CELL highlight color */
.provider-failures.focus-highlight th:focus,
.provider-failures.focus-highlight td:focus,
.provider-failures.focus-highlight .odd th:focus,
.provider-failures.focus-highlight .odd td:focus,
.provider-failures.focus-highlight .even th:focus,
.provider-failures.focus-highlight .even td:focus{
background-color:#181818;
color:#ddd
}

/* =======================================================================
404.tmpl
========================================================================== */

@@ -1374,7 +1429,7 @@ div.formpaginate .prev, div.formpaginate .next{
background:#2265a1
}

#customQualityWrapper .tip-text p{
#custom-quality-wrapper .tip-text p{
color:#999
}
@@ -29,6 +29,7 @@ pre .prelight-num{
background-image:url("../images/glyphicons-halflings-white.png")
}

.dark-bg .icon-glyph,
.icon-white{
background-image:url("../images/glyphicons-halflings.png")
}

@@ -351,6 +352,7 @@ home_newShow.tmpl
color:#909090
}

.btn-inverse.dark-bg,
#addRootDirTable td label .filepath,
.grey-text{color:#666}
.highlight-text{color:#000}

@@ -742,6 +744,60 @@ a.whitelink{
color:#000
}

/* TABLE BACKGROUND color */
.provider-failures.hover-highlight td:before,
.provider-failures.focus-highlight td:before{
background:#fff
}

/* ODD ZEBRA STRIPE color (needs zebra widget) */
.provider-failures.hover-highlight .odd th:before,
.provider-failures.hover-highlight .odd td:before,
.provider-failures.focus-highlight .odd th:before,
.provider-failures.focus-highlight .odd td:before{
background:#f5f1e4
}
/* EVEN ZEBRA STRIPE color (needs zebra widget) */
.provider-failures.hover-highlight .even th:before,
.provider-failures.hover-highlight .even td:before,
.provider-failures.focus-highlight .even th:before,
.provider-failures.focus-highlight .even td:before{
background-color:#dfdacf;
}

/* HOVER ROW highlight colors */
.provider-failures.hover-highlight tbody > tr:hover > td, /* override tablesorter theme row hover */
.provider-failures.hover-highlight tbody > tr.odd:hover > td,
.provider-failures.hover-highlight tbody > tr.even:hover > td{
background-color:#f4f3c2
}
/* HOVER COLUMN highlight colors */
.provider-failures.hover-highlight tbody tr th:hover::after,
.provider-failures.hover-highlight tbody tr td:hover::after{
background-color:#f4f3c2
}

/* FOCUS ROW highlight color (touch devices) */
.provider-failures.focus-highlight th:focus::before,
.provider-failures.focus-highlight td:focus::before{
background-color:#dfdead
}
/* FOCUS COLUMN highlight color (touch devices) */
.provider-failures.focus-highlight th:focus::after,
.provider-failures.focus-highlight td:focus::after{
background-color:#dfdead
}
/* FOCUS CELL highlight color */
.provider-failures.focus-highlight th:focus,
.provider-failures.focus-highlight td:focus,
.provider-failures.focus-highlight .odd th:focus,
.provider-failures.focus-highlight .odd td:focus,
.provider-failures.focus-highlight .even th:focus,
.provider-failures.focus-highlight .even td:focus{
background-color:#dfdead;
color:#222
}

/* =======================================================================
404.tmpl
========================================================================== */

@@ -1335,7 +1391,7 @@ div.formpaginate .prev, div.formpaginate .next{
background:#57442b
}

#customQualityWrapper .tip-text p{
#custom-quality-wrapper .tip-text p{
color:#666
}

@@ -1381,8 +1437,8 @@ tablesorter.css
}

thead.tablesorter-stickyHeader{
border-top:2px solid #fff;
border-bottom:2px solid #fff
border-top:2px solid #ddd;
border-bottom:2px solid #ddd
}

/* Zebra Widget - row alternating colors */

@@ -1404,7 +1460,7 @@ thead.tablesorter-stickyHeader{
}

.tablesorter tfoot tr{
color:#fff;
color:#ddd;
text-align:center;
text-shadow:-1px -1px 0 rgba(0, 0, 0, 0.3);
background-color:#333;
@@ -623,6 +623,20 @@ inc_top.tmpl
content:"\e900"
}

.searchadd.icon-glyph{
display:none
}
.active .searchadd.icon-glyph{
opacity:0.4;filter:alpha(opacity=40);
float:none;
display:inline-block;
margin:0 0 -2px 0;
height:14px
}
.searchadd.icon-glyph{
background-position:-337px 0
}

/* =======================================================================
inc_bottom.tmpl
========================================================================== */

@@ -639,6 +653,27 @@ inc_bottom.tmpl
display:inline
}

.footer .icon-glyph{
opacity:0.4;filter:alpha(opacity=40);
float:none;
display:inline-block;
margin:0 0 -1px 2px;
height:12px;
width:14px
}
.footer .icon-glyph:hover{
opacity:0.6;filter:alpha(opacity=60);
cursor:pointer
}
.footer .icon-glyph.timeleft,
.footer .icon-glyph.time:hover{
background-position:-49px -25px
}
.footer .icon-glyph.time,
.footer .icon-glyph.timeleft:hover{
background-position:-193px -121px
}

/* =======================================================================
inc_rootDirs.tmpl
========================================================================== */

@@ -1103,14 +1138,14 @@ div.formpaginate{
margin-right:6px
}

#edit-show #customQualityWrapper .tip-text p,
#addShowForm #customQualityWrapper .tip-text p,
#edit-show #customQualityWrapper .tip-text em,
#addShowForm #customQualityWrapper .tip-text em{
#edit-show #custom-quality-wrapper .tip-text p,
#addShowForm #custom-quality-wrapper .tip-text p,
#edit-show #custom-quality-wrapper .tip-text em,
#addShowForm #custom-quality-wrapper .tip-text em{
font-size:13px
}

#addShowForm .stepDiv #customQuality.show-if-quality-custom span.component-desc p{
#addShowForm .stepDiv #custom-quality.show-if-quality-custom span.component-desc p{
font-size:12px
}

@@ -2711,7 +2746,7 @@ config*.tmpl
color:#666
}

.stepDiv #customQualityWrapper h4{
.stepDiv #custom-quality-wrapper h4{
margin-top:6px;
padding:0 0
}

@@ -2743,7 +2778,7 @@ config*.tmpl
float:left
}

#config .nocheck, #config div #customQuality, .metadataDiv{
#config .nocheck, #config div #custom-quality, .metadataDiv{
padding-left:20px
}

@@ -2826,19 +2861,46 @@ select .selected:before{
}

#editShow .field-pair #SceneException h4,
#editShow .field-pair #customQuality h4{
#editShow .field-pair #custom-quality h4{
font-size:13px !important;
}

#editShow .field-pair #SceneException h4,
#editShow .field-pair #customQuality h4{
#editShow .field-pair #custom-quality h4{
margin-bottom:6px
}

#editShow .field-pair #customQuality h4{
#editShow .field-pair #custom-quality h4{
line-height:normal
}

#custom-quality .btn,
#custom-quality .btn-placeholder{
width:13em
}
#custom-quality .btn-placeholder{
display:inline-block;
border:1px transparent
}

#addShowForm #add-white,
#addShowForm #add-black{
margin:0 0 10px 30px !important
}
#addShowForm #remove-white,
#addShowForm #remove-black{
margin:0 0 0 30px !important
}
#edit-show #add-white,
#edit-show #add-black{
margin:0 0 10px !important
}

#edit-show #remove-white,
#edit-show #remove-black{
margin:0 !important
}

.test-notification{
padding:5px;
margin-bottom:10px;

@@ -3191,6 +3253,85 @@ input.get_less_eps{
display:none
}

#media-search .section{
padding-bottom:10px
}
#media-search .btn{
margin:0 6px 0 0;
min-width:70px
}
#media-search .btn.shows-more,
#media-search .btn.shows-less{
margin:6px 6px 6px 0;
}
#media-search .btn.provider-retry{
margin:6px 0 6px 4px;
}
.tablesorter.provider-failures{width:auto;clear:both;margin-bottom:10px}
.tablesorter.provider-failures > tbody > tr.tablesorter-childRow td{display:none}
.tablesorter.provider-failures.tablesorter > tbody > tr{background-color:transparent}

.provider-failures.hover-highlight th:hover::after,
.provider-failures.hover-highlight td:hover::after,
.provider-failures.focus-highlight th:focus::after,
.provider-failures.focus-highlight td:focus::after{
content:'';
position:absolute;
width:100%;
height:999em;
left:0;
top:-555em;
z-index:-1
}
.provider-failures.focus-highlight th:focus::before,
.provider-failures.focus-highlight td:focus::before{
content:'';
position:absolute;
width:999em;
height:100%;
left:-555em;
top:0;
z-index:-2
}
/* required styles */
.provider-failures.hover-highlight,
.provider-failures.focus-highlight{
overflow:hidden
}
.provider-failures.hover-highlight th,
.provider-failures.hover-highlight td,
.provider-failures.focus-highlight th,
.provider-failures.focus-highlight td{
position:relative;
outline:0
}
/* override the tablesorter theme styling */
.provider-failures.hover-highlight,
.provider-failures.hover-highlight tbody > tr > td,
.provider-failures.focus-highlight,
.provider-failures.focus-highlight tbody > tr > td,
/* override zebra styling */
.provider-failures.hover-highlight tbody tr.even > th,
.provider-failures.hover-highlight tbody tr.even > td,
.provider-failures.hover-highlight tbody tr.odd > th,
.provider-failures.hover-highlight tbody tr.odd > td,
.provider-failures.focus-highlight tbody tr.even > th,
.provider-failures.focus-highlight tbody tr.even > td,
.provider-failures.focus-highlight tbody tr.odd > th,
.provider-failures.focus-highlight tbody tr.odd > td{
background:transparent
}
/* table background positioned under the highlight */
.provider-failures.hover-highlight td:before,
.provider-failures.focus-highlight td:before{
content:'';
position:absolute;
width:100%;
height:100%;
left:0;
top:0;
z-index:-3
}
/* =======================================================================
404.tmpl
========================================================================== */

@@ -3423,6 +3564,12 @@ img[src=""],img:not([src]){
left:0
}

.bfr{
position:absolute;
left:-999px;
top:-999px
}

/* =======================================================================
bootstrap Overrides
========================================================================== */

@@ -4227,6 +4374,24 @@ div.formpaginate .prev, div.formpaginate .next{
padding:15px
}

#import-shows .stepDiv,
.step-three .stepDiv{
padding:15px 0
}

#import-shows #addShowForm{
width:861px
}

.step-three #custom-quality-wrapper{
width:831px
}

#import-shows #addShowForm .stepDiv span.component-desc,
#addShowForm .step-three .stepDiv span.component-desc{
width:639px
}

.stepDiv.parent-folder{
padding:15px 0 0;
width:430px;

@@ -4238,21 +4403,21 @@ div.formpaginate .prev, div.formpaginate .next{
}

/* step 3 related */
#edit-show #customQualityWrapper #customQuality,
#customQuality{
#edit-show #custom-quality-wrapper #custom-quality,
#custom-quality{
display:block;
padding:0 0 10px 0;
overflow:hidden;
clear:both
}

#customQualityWrapper div.component-group-desc{
#custom-quality-wrapper div.component-group-desc{
float:left;
width:172px;
padding:0
}

#customQualityWrapper div.component-group-desc p{
#custom-quality-wrapper div.component-group-desc p{
margin:.8em 0;
font-size:1.2em
}

@@ -4268,8 +4433,7 @@ tablesorter.css
margin-left:auto;
color:#000;
/* text-align:left;*/
background-color:#ddd/*;
border-spacing:0*/
/* border-spacing:0*/
}

#display-show .tablesorter{

@@ -4317,20 +4481,6 @@ tablesorter.css
cursor:default
}

thead.tablesorter-stickyHeader{
border-top:2px solid #ddd;
border-bottom:2px solid #ddd
}

/* Zebra Widget - row alternating colors */
.tablesorter tr.odd, .sickbeardTable tr.odd{
background-color:#f5f1e4
}

.tablesorter tr.even, .sickbeardTable tr.even{
background-color:#dfdacf
}

/* filter widget */
.tablesorter .filtered{
display:none

@@ -4346,9 +4496,7 @@ thead.tablesorter-stickyHeader{

.tablesorter tr.tablesorter-filter-row,
.tablesorter tr.tablesorter-filter-row td{
text-align:center;
background:#eee;
border-bottom:1px solid #ddd
text-align:center
}

/* optional disabled input styling */

@@ -4362,10 +4510,7 @@ thead.tablesorter-stickyHeader{
}*/

.tablesorter tfoot tr{
color:#ddd;
text-align:center;
text-shadow:-1px -1px 0 rgba(0, 0, 0, 0.3);
background-color:#333;
border-collapse:collapse
}
@@ -335,6 +335,17 @@
</label>
</div>

#if not hasattr($sickbeard, 'DISPLAY_FREESPACE')#<span class="red-text">Restart SickGear to reveal new option here</span>#else#
<div class="field-pair">
<label for="display-freespace">
<span class="component-title">Display freespace</span>
<span class="component-desc">
<input type="checkbox" name="display_freespace" id="display-freespace"#echo ('', $checked)[$sg_var('DISPLAY_FREESPACE', True)]#>
<p>free space of parent locations is refreshed into any page footer request</p>
</span>
</label>
</div>
#end if
<div class="field-pair">
<label for="sort_article">
<span class="component-title">Sort with "The", "A", "An"</span>
@@ -74,18 +74,6 @@
</span>
</label>
</div>
<div class="field-pair">
<label for="check_propers_interval">
<span class="component-title">Check propers every:</span>
<span class="component-desc">
<select id="check_propers_interval" name="check_propers_interval" class="form-control input-sm">
#for $curKey, $curText, $void in $propers_intervals:
<option value="$curKey"#echo ('', $html_selected)[$sickbeard.CHECK_PROPERS_INTERVAL == $curKey]#>$curText</option>
#end for
</select>
</span>
</label>
</div>
</div>

<div class="field-pair">
@@ -295,7 +295,7 @@
<div>
<span class="details-title">Initial</span>
<span class="details-info">
#echo ', '.join([$Quality.qualityStrings[$x] for $x in sorted($anyQualities)])#
#echo ', '.join([$Quality.get_quality_ui($x) for $x in sorted($anyQualities)])#
</span>
</div>
#end if

@@ -303,7 +303,7 @@
<div>
<span class="details-title">Upgrade to</span>
<span class="details-info">
#echo ', '.join([$Quality.qualityStrings[$x] for $x in sorted($bestQualities)])#
#echo ', '.join([$Quality.get_quality_ui($x) for $x in sorted($bestQualities)])#
</span>
</div>
#end if

@@ -396,7 +396,7 @@
<optgroup label="Downloaded">
#for $curStatus in sorted($Quality.DOWNLOADED)
#if $DOWNLOADED != $curStatus
<option value="$curStatus">$re.sub('Downloaded\s*\(([^\)]+)\)', r'\1', $statusStrings[$curStatus])</option>
<option value="$curStatus">$re.sub('Downloaded\s*\(([^\)]+)\)', r'\1', $statusStrings[$curStatus].replace('SD DVD', 'SD DVD/BR/BD'))</option>
#end if
#end for
<option value="$DOWNLOADED">with archived quality</option>
@@ -146,17 +146,17 @@

<div class="field-pair">
#set $qualities = $common.Quality.splitQuality(int($show.quality))
#set global $anyQualities = $qualities[0]
#set global $bestQualities = $qualities[1]
#set global $any_qualities = $qualities[0]
#set global $best_qualities = $qualities[1]
#include $os.path.join($sg_str('PROG_DIR'), 'gui/slick/interfaces/default/inc_qualityChooser.tmpl')

#if $anyQualities + $bestQualities
#if $any_qualities + $best_qualities
<div class="field-pair show-if-quality-custom" style="display:none">
<label for="upgrade-once">
<span class="component-title">Upgrade once</span>
<span class="component-desc">
<input type="checkbox" name="upgrade_once" id="upgrade-once"#echo ('', $html_checked)[$show.upgrade_once]#>
<p>stop upgrading after matching the first best <em>Upgrade to</em> quality</p>
<p>stop upgrading after matching the first best <em>upgrade</em> quality</p>
</span>
</label>
</div>
@@ -134,7 +134,7 @@
#if $SUBTITLED == $curStatus
<img width="16" height="11" src="$sbRoot/images/flags/<%= hItem["resource"][len(hItem["resource"])-6:len(hItem["resource"])-4] + '.png' %>">
#end if
<span class="help" title="$os.path.basename($hItem["resource"])">$statusStrings[$curStatus]</span>
<span class="help" title="$os.path.basename($hItem["resource"])">$statusStrings[$curStatus].replace('SD DVD', 'SD DVD/BR/BD')</span>
</td>
<td class="provider">
#if $DOWNLOADED == $curStatus

@@ -156,7 +156,7 @@
#end if
#end if
</td>
<td><span class="hide">$curQuality</span><span class="quality $Quality.get_quality_css($curQuality)">$Quality.qualityStrings[$curQuality]</span></td>
<td><span class="hide">$curQuality</span><span class="quality $Quality.get_quality_css($curQuality)">$Quality.get_quality_ui($curQuality)</span></td>
</tr>
#end for

@@ -258,7 +258,7 @@
#end for
</td>
#end if
<td quality="$curQuality"><span class="quality $Quality.get_quality_css($curQuality)">$Quality.qualityStrings[$curQuality]</span></td>
<td quality="$curQuality"><span class="quality $Quality.get_quality_css($curQuality)">$Quality.get_quality_ui($curQuality)</span></td>
</tr>
#end for
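Several hunks in these templates swap $Quality.qualityStrings[...] for a new $Quality.get_quality_ui(...). Judging from the parallel .replace('SD DVD', 'SD DVD/BR/BD') edits, the helper decorates the raw quality name for display; a sketch of that assumed behaviour (the real definition lives in sickbeard/common.py and may differ):

    class Quality(object):
        # Trimmed example values; the real map covers every quality constant.
        qualityStrings = {1: 'SD TV', 2: 'SD DVD', 4: 'HD TV'}

        @staticmethod
        def get_quality_ui(quality):
            # Rename qualities for the UI, e.g. the 0.14.0 'SD DVD' relabel.
            return Quality.qualityStrings[quality].replace('SD DVD', 'SD DVD/BR/BD')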
@@ -8,6 +8,7 @@
#set global $sbPath = '../..'
#set global $statpath = '../..'
#set global $topmenu = 'home'
#set global $page_body_attr = 'import-shows"'
##
#import os.path
#include $os.path.join($sg_str('PROG_DIR'), 'gui/slick/interfaces/default/inc_top.tmpl')
@@ -9,7 +9,7 @@
<% def sg_str(varname, default=''): return getattr(sickbeard, varname, default) %>#slurp#
<div class="stepDiv linefix">

<div style="float:right;margin:-30px -15px 20px 15px;font-size:12px;line-height:27px;">
<div style="float:right;margin:-30px 0px 20px 15px;font-size:12px;line-height:27px;">
<span class="grey-text">To reuse options below when adding more shows <input class="btn btn-inline" type="button" id="saveDefaultsButton" value="Save as defaults" disabled="disabled" /></span>
</div>

@@ -19,7 +19,7 @@
<span class="component-desc">
<select name="defaultStatus" id="statusSelect" class="form-control form-control-inline input-sm">
#for $curStatus in [$SKIPPED, $WANTED, $ARCHIVED, $IGNORED]:
<option value="$curStatus"#if $sg_var('STATUS_DEFAULT', SKIPPED) == $curStatus then $selected else ''#>$statusStrings[$curStatus]</option>
<option value="$curStatus"#if $sg_var('STATUS_DEFAULT', SKIPPED) == $curStatus then $selected else ''#>$statusStrings[$curStatus].replace('SD DVD', 'SD DVD/BR/BD')</option>
#end for
</select>
<span>set the initial status of missing episodes</span>
@@ -33,8 +33,8 @@

<div class="field-pair">
#set $qualities = $Quality.splitQuality($sg_var('QUALITY_DEFAULT', SD))
#set global $anyQualities = $qualities[0]
#set global $bestQualities = $qualities[1]
#set global $any_qualities = $qualities[0]
#set global $best_qualities = $qualities[1]
#include $os.path.join($sg_str('PROG_DIR'), 'gui/slick/interfaces/default/inc_qualityChooser.tmpl')
</div>
@@ -19,8 +19,8 @@
#end for
</select>
<div style="position:relative; width:36px; height:64px; margin: -32px 0 0; top: 50%;" class="pull-right">
<input style="margin:0 0 10px !important" class="btn" id="add-white" value="<<" type="button">
<input style="margin:0 !important" class="btn clear:right" id="remove-white" value=">>" type="button">
<input id="add-white" class="btn" value="<<" type="button">
<input id="remove-white" class="btn clear:right" value=">>" type="button">
</div>
</div>
</div>

@@ -34,8 +34,8 @@
#end for
</select>
<div style="position:relative; width:36px; height:64px; margin: -32px 0 0; top: 50%;" class="pull-right">
<input style="margin:0 0 10px !important" class="btn" id="add-black" value="<<" type="button">
<input style="margin:0 !important" class="btn clear:right" id="remove-black" value=">>" type="button">
<input id="add-black" class="btn" value="<<" type="button">
<input id="remove-black" class="btn clear:right" value=">>" type="button">
</div>
</div>
</div>

@@ -65,4 +65,4 @@
</div>
</div>
</span>
</div><!-- /blackwhitelist -->
</div><!-- /blackwhitelist -->
@@ -3,6 +3,8 @@
#import re
#from sickbeard import db, sbdatetime
#from sickbeard.common import *
#from sickbeard.helpers import df
#from sickbeard.webserve import MainHandler
<% def sg_var(varname, default=False): return getattr(sickbeard, varname, default) %>#slurp#
<% def sg_str(varname, default=''): return getattr(sickbeard, varname, default) %>#slurp#
##

@@ -53,15 +55,10 @@
#set $localheader = ''
#end try
<%
try:
    next_backlog_timeleft = str(sickbeard.backlogSearchScheduler.next_backlog_timeleft()).split('.')[0]
except AttributeError:
    next_backlog_timeleft = 'soon'
try:
    recent_search_timeleft = str(sickbeard.recentSearchScheduler.timeLeft()).split('.')[0]
except AttributeError:
    recent_search_timeleft = 'soon'
%>
diskfree, min_output = df()
if min_output:
    avail = ', '.join(['%s <span class="footerhighlight">%s</span>' % (drive, free) for (drive, free) in diskfree])
%>#slurp#
##
<span class="footerhighlight">$shows_total</span> shows (<span class="footerhighlight">$shows_active</span> active)
| <span class="footerhighlight">$ep_downloaded</span><%=

@@ -72,9 +69,72 @@ except AttributeError:
% (localRoot, str(ep_snatched))
)[0 < ep_snatched]
%> / <span class="footerhighlight">$ep_total</span> episodes downloaded $ep_percentage
| recent search: <span class="footerhighlight">$recent_search_timeleft</span>
| backlog search: <span class="footerhighlight">$next_backlog_timeleft</span>
#for i, event in enumerate($MainHandler.getFooterTime(change_layout=False, json_dump=False))
#for k, v in event.items()
#set info = re.findall('(.*)_(timeleft|time)', k)[0]
#if not i
<br>next connect <i class="icon-glyph layout $info[1]" title="Change time layout"></i> for...
#end if
<span id="next-connect-$info[0]">| $info[0].replace('-', ' '): <span class="footerhighlight $info[0]">$v</span></span>
#end for
#end for
#if diskfree
#if min_output
<br>free space $avail
#else
<div class="table-responsive">
<style>
.stat-table{margin:0 auto}
.stat-table > tbody > tr > td{padding:0 5px}
</style>
<table class="stat-table" cellspacing="5" cellpadding="5">
<caption style="display:none">Free space stats for volume/path</caption>
<tbody>
#for i, drive in enumerate(diskfree)
<tr>
<td>#if not i#free space#end if#</td>
<td><span class="footerhighlight">$drive[1]</span></td>
<td style="text-align:left">$drive[0]</td>
</tr>
#end for
</tbody>
</table>
</div>
#end if
#end if
</div>
<script>
var footerTimeUrl = '$localRoot/getFooterTime';
#raw
function getFooterTime(params){
    params = /undefined/.test(params) && {} || params;
    $.getJSON(footerTimeUrl, params, function(data){
        var info, footerIcon$ = $('.footer').find('.icon-glyph.layout'), enabledPropers = !1;
        $.each(data, function(i, eventItems){
            $.each(eventItems, function(k, v){
                info = k.match(/(.*)_(timeleft|time)/);
                $('.footer').find('.' + info[1]).html(v);
                footerIcon$.removeClass('time').removeClass('timeleft').addClass(info[2]);
                enabledPropers |= /propers/.test(info[1]); // enable only if key is found in response
            });
        });

        var propers$ = $('#next-connect-propers');
        if(enabledPropers){
            propers$.show();
        } else {
            propers$.hide();
        }
    });
}

$(function(){
    $('.footer').find('.layout').click(function(){
        getFooterTime();
    });
});
#end raw
</script>
</footer>
</body>
</html>
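The footer above leans on a df() helper imported from sickbeard.helpers, which returns a list of (location, free space) pairs plus a flag selecting the compact one-line layout. A rough sketch of how such a helper can work on POSIX (illustrative; the real df() takes no arguments, gathers the parent folders itself, and also covers Windows):

    import os

    def df_sketch(paths):
        """Free space (as a display string) for each parent folder, if obtainable."""
        stats = []
        for path in paths:
            try:
                st = os.statvfs(path)  # POSIX; Windows would need GetDiskFreeSpaceEx
                free_gib = st.f_bavail * st.f_frsize / 1073741824.0
                stats.append((path, '%.2f GB' % free_gib))
            except OSError:
                pass  # 'if obtainable': skip locations that fail to stat
        # Assumption: use the compact footer line when only a few entries exist.
        return stats, 4 > len(stats)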
@@ -105,9 +105,9 @@
#slurp
#set $curStatus, $curQuality = $Quality.splitCompositeStatus(int($ep['status']))
#if Quality.NONE != $curQuality
<td class="col-status">#if $SUBTITLED == $curStatus#<span class="addQTip" title="$statusStrings[$curStatus]"><i class="sgicon-subtitles" style="vertical-align:middle"></i></span>#else#$statusStrings[$curStatus].replace('Downloaded', '')#end if# <span class="quality $Quality.get_quality_css($curQuality)#if $downloaded# addQTip" title="$downloaded#end if#">$Quality.qualityStrings[$curQuality]</span></td>
<td class="col-status">#if $SUBTITLED == $curStatus#<span class="addQTip" title="$statusStrings[$curStatus]"><i class="sgicon-subtitles" style="vertical-align:middle"></i></span>#else#$statusStrings[$curStatus].replace('Downloaded', '')#end if# <span class="quality $Quality.get_quality_css($curQuality)#if $downloaded# addQTip" title="$downloaded#end if#">$Quality.get_quality_ui($curQuality)</span></td>
#else
<td class="col-status">$statusStrings[$curStatus]</td>
<td class="col-status">$statusStrings[$curStatus].replace('SD DVD', 'SD DVD/BR/BD')</td>
#end if
<td class="col-search">
#if 0 != int($ep['season'])
@@ -3,15 +3,15 @@

#set $html_selected = ' selected="selected"'
<div class="field-pair">
<label for="qualityPreset" class="clearfix">
#set $overall_quality = $Quality.combineQualities($anyQualities, $bestQualities)
<label for="quality-preset" class="clearfix">
#set $overall_quality = $Quality.combineQualities($any_qualities, $best_qualities)
<span class="component-title input">Quality to download</span>
<span class="component-desc">
#set $selected = None
<select id="qualityPreset" name="quality_preset" class="form-control form-control-inline input-sm">
<select id="quality-preset" name="quality_preset" class="form-control form-control-inline input-sm">
<option value="0">Custom</option>
#for $curPreset in $qualityPresets:
<option value="$curPreset"#echo ('', $html_selected)[$curPreset == $overall_quality]##echo ('', ' style="padding-left:15px"')[$qualityPresetStrings[$curPreset].endswith('0p') and 'UHD' not in $qualityPresetStrings[$curPreset]]#>$qualityPresetStrings[$curPreset]</option>
#for $cur_preset in $qualityPresets:
<option value="$cur_preset"#echo ('', $html_selected)[$cur_preset == $overall_quality]##echo ('', ' style="padding-left:15px"')[$qualityPresetStrings[$cur_preset].endswith('0p') and 'UHD' not in $qualityPresetStrings[$cur_preset]]#>$qualityPresetStrings[$cur_preset]</option>
#end for
</select>
<span>tip: select a quality then "Custom" for a default selection</span>

@@ -19,43 +19,76 @@
</label>
</div>

<div id="customQualityWrapper">
<div id="customQuality" class="show-if-quality-custom" style="display:none">
<div class="component-group-desc tip-text">
<p>An <em class="highlight-text">Initial</em> quality downloads before any <em class="highlight-text">Upgrade to</em> selections are considered.</p>
<p>Deselect all <em class="highlight-text">Upgrade to</em> qualities to keep the first best <em class="highlight-text">Initial</em> release found.</p>
<p>All found <em class="highlight-text">Upgrade to</em> qualities download until the best.</p>
<div id="custom-quality-wrapper">
<div id="custom-quality" class="show-if-quality-custom" style="display:none">
<div class="field-pair">
<div class="component-group-desc tip-text">
<p style="margin-bottom:25px">An <em class="highlight-text">initial</em> quality downloads before optional upgrades</p>

<p id="unknown-quality" style="display:none">
Temporarily use <em class="red-text">'Unknown'</em> to skip release qual checks.
Results in spam if left on
</p>
</div>

<span id="initial-quality" class="component-desc">
<p>Select one or more qualities; the best one found when searching will be snatched</p>
#set $any_quality_list = filter(lambda x: x > $Quality.NONE and x < $Quality.UNKNOWN, $Quality.qualityStrings)
#set $has_unknown = False
#for $cur_quality in sorted($any_quality_list):
##set $has_unknown |= ($Quality.UNKNOWN == $cur_quality and $cur_quality in $any_qualities)
<a href="#" data-quality="$cur_quality" class="btn btn-inverse dark-bg#echo ('', ' active')[$cur_quality in $any_qualities]#" role="button"><i class="icon-glyph searchadd"></i>$Quality.get_quality_ui($cur_quality)</a>
#if $Quality.UHD4KWEB == $cur_quality
<span class="btn-placeholder"></span>
<span class="btn-placeholder"></span>
<a href="#" data-quality="$Quality.UNKNOWN" class="btn btn-inverse dark-bg#echo ('', ' active')[$Quality.UNKNOWN in $any_qualities]#" role="button"><i class="icon-glyph searchadd"></i>$Quality.get_quality_ui($Quality.UNKNOWN)</a>

#end if
#if $cur_quality in [$Quality.SDDVD, $Quality.FULLHDTV, $Quality.FULLHDBLURAY, $Quality.UHD4KWEB]
<br>
#end if
#end for
</span>
</div>

<span class="component-desc">
<div class="field-pair" style="clear:both">
<div class="component-group-desc tip-text">
<p style="margin-bottom:25px">All found <em class="highlight-text">upgrade</em> qualities download until the best</p>

<p id="no-upgrade" style="display:none">No <em class="highlight-text">upgrades</em> selected, an <em class="highlight-text">initial</em> snatch will complete any search</p>
<p id="upgrade-cond" style="display:none">An <em class="highlight-text">upgrade</em> will only search after an <em class="highlight-text">initial</em> has complete</p>
</div>
<span id="upgrade-quality" class="component-desc">
<p>Optional, upgrade a completed download to any selected quality</p>
#set $best_quality_list = filter(lambda x: x > $Quality.SDTV and x < $Quality.UNKNOWN, $Quality.qualityStrings)
#for $cur_quality in sorted($best_quality_list):
<a href="#" data-quality="$cur_quality" class="btn btn-inverse dark-bg#echo ('', ' active')[$cur_quality in $best_qualities]#" role="button"><i class="icon-glyph searchadd"></i>$Quality.get_quality_ui($cur_quality)</a>
#if $cur_quality in [$Quality.SDDVD, $Quality.FULLHDTV, $Quality.FULLHDBLURAY]
<br>
#end if
#end for
</span>
</div>

<span class="component-desc bfr">
<div style="float:left;padding-right:28px">
<h4 class="jumbo">Initial</h4>
#set $anyQualityList = filter(lambda x: x > $Quality.NONE, $Quality.qualityStrings)
<select id="anyQualities" name="anyQualities" multiple="multiple" size="$len($anyQualityList)" class="form-control form-control-inline input-sm">
#set $has_unknown = False
#for $curQuality in sorted($anyQualityList):
#set $has_unknown |= ($Quality.UNKNOWN == $curQuality and $curQuality in $anyQualities)
<option value="$curQuality"#echo ('', $html_selected)[$curQuality in $anyQualities]#>$Quality.qualityStrings[$curQuality]</option>
#set $any_quality_list = filter(lambda x: x > $Quality.NONE, $Quality.qualityStrings)
<select id="initial-qualities" name="anyQualities" multiple="multiple" size="$len($any_quality_list)" class="form-control form-control-inline input-sm">
#for $cur_quality in sorted($any_quality_list):
<option value="$cur_quality"#echo ('', $html_selected)[$cur_quality in $any_qualities]#>$Quality.get_quality_ui($cur_quality)</option>
#end for
</select>
</div>

<div style="float:left;padding-right:20px">
<h4 class="jumbo">Upgrade to</h4>
#set $bestQualityList = filter(lambda x: x > $Quality.SDTV and x < $Quality.UNKNOWN, $Quality.qualityStrings)
<select id="bestQualities" name="bestQualities" multiple="multiple" size="$len($bestQualityList)" class="form-control form-control-inline input-sm">
#for $curQuality in sorted($bestQualityList):
<option value="$curQuality"#echo ('', $html_selected)[$curQuality in $bestQualities]#>$Quality.qualityStrings[$curQuality]</option>
#set $best_quality_list = filter(lambda x: x > $Quality.SDTV and x < $Quality.UNKNOWN, $Quality.qualityStrings)
<select id="upgrade-qualities" name="bestQualities" multiple="multiple" size="$len($best_quality_list)" class="form-control form-control-inline input-sm">
#for $cur_quality in sorted($best_quality_list):
<option value="$cur_quality"#echo ('', $html_selected)[$cur_quality in $best_qualities]#>$Quality.get_quality_ui($cur_quality)</option>
#end for
</select><br />
<span>Ctrl + Click = toggle a quality</span>
</div>

<div style="line-height:normal;padding-top:50px" id="quality-notes" class="tip-text">
<p id="unknown"#if not $has_unknown# style="display:none"#end if#>
<em class="highlight-text">Note:</em> Temporarily use 'Unknown' for releases with no recognised quality.
Full-time use risks snatching bad releases and wastes API hits.
</p>
</div>
</span>
</div>
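The preset select above round-trips through Quality.combineQualities and Quality.splitQuality, which pack the 'initial' and 'upgrade to' selections into one integer per show. A sketch of that packing as implied by its use here (each quality is a power-of-two flag; upgrade flags are assumed to occupy the high 16 bits):

    def combine_qualities(any_qualities, best_qualities):
        combined = 0
        for q in any_qualities:
            combined |= q            # 'initial' flags: low 16 bits
        for q in best_qualities:
            combined |= q << 16      # 'upgrade to' flags: high 16 bits
        return combined

    def split_quality(combined):
        any_q = [1 << b for b in range(16) if combined & (1 << b)]
        best_q = [1 << b for b in range(16) if combined & (1 << (b + 16))]
        return any_q, best_q

This is why the template can compare a single $overall_quality value against each preset: a preset is just one such combined integer.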
@@ -1,4 +1,5 @@
#import sickbeard
#from sickbeard import sbdatetime
##
#set global $title = 'Media Search'
#set global $header = 'Media Search'

@@ -8,131 +9,252 @@
#import os.path
#include $os.path.join($sickbeard.PROG_DIR, 'gui/slick/interfaces/default/inc_top.tmpl')

<input type="hidden" id="sbRoot" value="$sbRoot">
<script type="text/javascript" src="$sbRoot/js/plotTooltip.js?v=$sbPID"></script>
<script type="text/javascript" src="$sbRoot/js/manageSearches.js?v=$sbPID"></script>
<div id="content800">
<div id="media-search" class="align-left">

#if $varExists('header')
<h1 class="header">$header</h1>
#else
<h1 class="title">$title</h1>
#end if

<div id="summary2" class="align-left">

<div id="backlog-search" class="section">
<h3>Backlog Search:</h3>
<a id="forcebacklog" class="btn#if $standard_backlog_running or $backlog_is_active# disabled#end if#" href="$sbRoot/manage/manageSearches/forceBacklog"><i class="sgicon-play"></i> Force</a>
<a id="pausebacklog" class="btn" href="$sbRoot/manage/manageSearches/pauseBacklog?paused=#if $backlog_paused then "0" else "1"#"><i class="#if $backlog_paused then "sgicon-play" else "sgicon-pause"#"></i> #if $backlog_paused then "Unpause" else "Pause"#</a>
#if $backlog_paused then 'Paused: ' else ''#
#if $backlog_paused
Paused:
#end if##slurp#
#if not $backlog_running and not $backlog_is_active:
Not in progress<br />
Not in progress
#else
Currently running#if $backlog_running_type != "None"# ($backlog_running_type)#end if#<br />
Currently running#if $backlog_running_type != "None"# ($backlog_running_type)#end if#
#end if
<br />
</div>

<div id="recent-search" class="section">
<h3>Recent Search:</h3>
<a id="recentsearch" class="btn#if $recent_search_status# disabled#end if#" href="$sbRoot/manage/manageSearches/forceSearch"><i class="sgicon-play"></i> Force</a>
#if not $recent_search_status
Not in progress<br />
Not in progress
#else
In Progress<br />
In Progress
#end if
<br />
</div>

<div id="propers-search" class="section">
<h3>Find Propers Search:</h3>
<a id="propersearch" class="btn#if $find_propers_status# disabled#end if#" href="$sbRoot/manage/manageSearches/forceFindPropers"><i class="sgicon-play"></i> Force</a>
#if not $find_propers_status
Not in progress<br />
Not in progress
#else
In Progress<br />
In Progress
#end if
<br />
</div>

<br /><br />

<h3>Search Queue:</h3>

<div id="provider-failures" class="section">
<h3>Provider Failures:</h3>
#if not $provider_fails
<p>No current failures. Failure stats display here when appropriate.</p>
#else
<p>Some providers can be often down over periods, SickGear will back off then retry connecting at a later time</p>
#for $prov in $provider_fail_stats
#if $len($prov['fails'])

<!-- $prov['name'] -->
<div>
<input type="button" class="shows-more btn" value="Expand" style="display:none"><input type="button" class="shows-less btn" value="Collapse"><img src="$sbRoot/images/providers/$prov['prov_img']" width="16" height="16" style="margin:0 6px 0 3px">$prov['name']
#if $prov['active']
#if $prov['next_try']
#set nt = $str($prov['next_try']).split('.', 2)
... is blocked until $sbdatetime.sbdatetime.sbftime($sbdatetime.sbdatetime.now() + $prov['next_try'], markup=True) (in $nt[0]) <input type="button" class="provider-retry btn" id="$prov['prov_id']-btn-retry" value="Ignore block on next search">
#end if
#else
... is not enabled
#end if
</div>
<table class="manageTable provider-failures tablesorter hover-highlight focus-highlight text-center" cellspacing="0" border="0" cellpadding="0">
<thead>
<tr>
<th class="text-center" style="width:13em;padding-right:20px">period of 1hr</th>
<th class="text-center" style="padding-right:20px">server/timeout</th>
<th class="text-center" style="padding-right:20px">network</th>
<th class="text-center" style="padding-right:20px">no data</th>
<th class="text-center" style="padding-right:20px">other</th>
#if $prov['has_limit']
<th class="text-center" style="padding-right:20px">hit limit</th>
#end if
</tr>
</thead>
<tbody>
#set $day = []
#for $fail in $prov['fails']
#set $child = True
#if $fail['date'] not in $day
#set $day += [$fail['date']]
#set $child = False
#end if
#slurp#
<tr#if $fail['multirow'] and $child# class="tablesorter-childRow"#end if#>
#if $fail['multirow']
#if not $child
<td><a href="#" class="provider-fail-parent-toggle" title="Totals (expand for detail)">$sbdatetime.sbdatetime.sbfdate($fail['date_time'])</a></td>
#else
<td>$sbdatetime.sbdatetime.sbftime($fail['date_time'], markup=True)</td>
#end if
#else
<td>$sbdatetime.sbdatetime.sbfdatetime($fail['date_time'], markup=True)</td>
#end if
#set $blank = '-'
#set $title=None
#if $fail['http']['count']
#set $title=$fail['http']['code']
#end if
<td>#if $fail['http']['count']#<span title="#if $child or not $fail['multirow']#$title#else#Expand for fail codes#end if#">$fail['http']['count']</span>#else#$blank#end if# / #echo $fail['timeout'].get('count', 0) or $blank#</td>
<td>#echo ($fail['connection'].get('count', 0) + $fail['connection_timeout'].get('count', 0)) or $blank#</td>
<td>#echo $fail['nodata'].get('count', 0) or $blank#</td>
<td>#echo $fail['other'].get('count', 0) or $blank#</td>
#if $prov['has_limit']
<td>#echo $fail.get('limit', {}).get('count', 0) or $blank#</td>
#end if
</tr>
#end for
</tbody>
</table>
<!-- /$prov['name'] -->
#end if
#end for
#end if
</div>

<div id="search-queues" class="section">
<h3>Search Queues:</h3>

#if $queue_length['backlog'] or $queue_length['manual'] or $queue_length['failed']
<input type="button" class="show-all-more btn" id="all-btn-more" value="Expand All"><input type="button" class="show-all-less btn" id="all-btn-less" value="Collapse All"><br>
#end if
<br>
Recent: <i>$queue_length['recent'] item$sickbeard.helpers.maybe_plural($queue_length['recent'])</i><br><br>
Proper: <i>$queue_length['proper'] item$sickbeard.helpers.maybe_plural($queue_length['proper'])</i><br><br>
Backlog: <i>$len($queue_length['backlog']) item$sickbeard.helpers.maybe_plural($len($queue_length['backlog']))</i>
#if $queue_length['backlog']
<input type="button" class="shows-more btn" id="backlog-btn-more" value="Expand" #if not $queue_length['backlog']# style="display:none" #end if#><input type="button" class="shows-less btn" id="backlog-btn-less" value="Collapse" style="display:none"><br>
<table class="sickbeardTable manageTable" cellspacing="1" border="0" cellpadding="0" style="display:none">
<thead></thead>
<tbody>
<div id="queue-recent" class="section">
Recent: <i>$queue_length['recent'] item$sickbeard.helpers.maybe_plural($queue_length['recent'])</i>
</div>

<div id="queue-proper" class="section">
Proper: <i>$len($queue_length['proper']) item$sickbeard.helpers.maybe_plural($len($queue_length['proper']))</i>
#if $queue_length['proper']
<input type="button" class="shows-more btn" id="proper-btn-more" value="Expand" #if not $queue_length['proper']# style="display:none" #end if#><input type="button" class="shows-less btn" id="proper-btn-less" value="Collapse" style="display:none"><br>
<table class="sickbeardTable manageTable" cellspacing="1" border="0" cellpadding="0" style="display:none">
<thead></thead>
<tbody>
#set $row = 0
#for $cur_item in $queue_length['backlog']:
#set $search_type = 'On Demand'
#if $cur_item['standard_backlog']:
#if $cur_item['forced']:
#set $search_type = 'Forced'
#else
#set $search_type = 'Scheduled'
#end if
#if $cur_item['torrent_only']:
#set $search_type += ', Torrent Only'
#end if
#if $cur_item['limited_backlog']:
#set $search_type += ' (Limited)'
#else
#set $search_type += ' (Full)'
#end if
#for $cur_item in $queue_length['proper']:
#if $cur_item['recent']:
#set $search_type = 'Recent'
#else
#set $search_type = 'Scheduled'
#end if
<tr class="#echo ('odd', 'even')[$row % 2]##set $row+=1#">
<td style="width:80%;text-align:left;color:white">
<a class="whitelink" href="$sbRoot/home/displayShow?show=$cur_item['indexerid']">$cur_item['name']</a> - $sickbeard.helpers.make_search_segment_html_string($cur_item['segment'])
</td>
<td style="width:20%;text-align:center;color:white">$search_type</td>
</tr>
<tr class="#echo ('odd', 'even')[$row % 2]##set $row+=1#">
<td style="width:20%;text-align:center;color:white">$search_type</td>
</tr>
#end for
</tbody>
</table>
#else
<br>
</tbody>
</table>
#end if
<br>
Manual: <i>$len($queue_length['manual']) item$sickbeard.helpers.maybe_plural($len($queue_length['manual']))</i>
</div>

<div id="queue-backlog" class="section">
Backlog: <i>$len($queue_length['backlog']) item$sickbeard.helpers.maybe_plural($len($queue_length['backlog']))</i>
#if $queue_length['backlog']
<input type="button" class="shows-more btn" id="backlog-btn-more" value="Expand" #if not $queue_length['backlog']# style="display:none" #end if#><input type="button" class="shows-less btn" id="backlog-btn-less" value="Collapse" style="display:none"><br>
<table class="sickbeardTable manageTable" cellspacing="1" border="0" cellpadding="0" style="display:none">
<thead></thead>
<tbody>
#set $row = 0
#for $cur_item in $queue_length['backlog']:
#set $search_type = 'On Demand'
#if $cur_item['standard_backlog']:
#if $cur_item['forced']:
#set $search_type = 'Forced'
#else
#set $search_type = 'Scheduled'
#end if
#if $cur_item['torrent_only']:
#set $search_type += ', Torrent Only'
#end if
#if $cur_item['limited_backlog']:
#set $search_type += ' (Limited)'
#else
#set $search_type += ' (Full)'
#end if
#end if
<tr class="#echo ('odd', 'even')[$row % 2]##set $row+=1#">
<td style="width:80%;text-align:left;color:white">
<a class="whitelink" href="$sbRoot/home/displayShow?show=$cur_item['indexerid']">$cur_item['name']</a> - $sickbeard.helpers.make_search_segment_html_string($cur_item['segment'])
</td>
<td style="width:20%;text-align:center;color:white">$search_type</td>
</tr>
#end for
</tbody>
</table>
#end if
</div>

<div id="queue-manual" class="section">
Manual: <i>$len($queue_length['manual']) item$sickbeard.helpers.maybe_plural($len($queue_length['manual']))</i>
#if $queue_length['manual']
<input type="button" class="shows-more btn" id="manual-btn-more" value="Expand" #if not $queue_length['manual']# style="display:none" #end if#><input type="button" class="shows-less btn" id="manual-btn-less" value="Collapse" style="display:none"><br>
<table class="sickbeardTable manageTable" cellspacing="1" border="0" cellpadding="0" style="display:none">
<thead></thead>
<tbody>
#set $row = 0
#for $cur_item in $queue_length['manual']:
<tr class="#echo ('odd', 'even')[$row % 2]##set $row+=1#">
<td style="width:100%;text-align:left;color:white">
<a class="whitelink" href="$sbRoot/home/displayShow?show=$cur_item['indexerid']">$cur_item['name']</a> - $sickbeard.helpers.make_search_segment_html_string($cur_item['segment'])
</td>
</tr>
#end for
</tbody>
</table>
#else
<br>
<input type="button" class="shows-more btn" id="manual-btn-more" value="Expand" #if not $queue_length['manual']# style="display:none" #end if#><input type="button" class="shows-less btn" id="manual-btn-less" value="Collapse" style="display:none"><br>
<table class="sickbeardTable manageTable" cellspacing="1" border="0" cellpadding="0" style="display:none">
<thead></thead>
<tbody>
#set $row = 0
#for $cur_item in $queue_length['manual']:
<tr class="#echo ('odd', 'even')[$row % 2]##set $row+=1#">
<td style="width:100%;text-align:left;color:white">
<a class="whitelink" href="$sbRoot/home/displayShow?show=$cur_item['indexerid']">$cur_item['name']</a> - $sickbeard.helpers.make_search_segment_html_string($cur_item['segment'])
</td>
</tr>
#end for
</tbody>
</table>
#end if
<br>
Failed: <i>$len($queue_length['failed']) item$sickbeard.helpers.maybe_plural($len($queue_length['failed']))</i>
</div>

<div id="queue-failed" class="section">
Failed: <i>$len($queue_length['failed']) item$sickbeard.helpers.maybe_plural($len($queue_length['failed']))</i>
#if $queue_length['failed']
<input type="button" class="shows-more btn" id="failed-btn-more" value="Expand" #if not $queue_length['failed']# style="display:none" #end if#><input type="button" class="shows-less btn" id="failed-btn-less" value="Collapse" style="display:none"><br>
<table class="sickbeardTable manageTable" cellspacing="1" border="0" cellpadding="0" style="display:none">
<thead></thead>
<tbody>
#set $row = 0
#for $cur_item in $queue_length['failed']:
<tr class="#echo ('odd', 'even')[$row % 2]##set $row+=1#">
<td style="width:100%;text-align:left;color:white">
<a class="whitelink" href="$sbRoot/home/displayShow?show=$cur_item['indexerid']">$cur_item['name']</a> - $sickbeard.helpers.make_search_segment_html_string($cur_item['segment'])
</td>
</tr>
#end for
</tbody>
</table>
#else
<br>
<input type="button" class="shows-more btn" id="failed-btn-more" value="Expand" #if not $queue_length['failed']# style="display:none" #end if#><input type="button" class="shows-less btn" id="failed-btn-less" value="Collapse" style="display:none"><br>
<table class="sickbeardTable manageTable" cellspacing="1" border="0" cellpadding="0" style="display:none">
<thead></thead>
<tbody>
#set $row = 0
#for $cur_item in $queue_length['failed']:
<tr class="#echo ('odd', 'even')[$row % 2]##set $row+=1#">
<td style="width:100%;text-align:left;color:white">
<a class="whitelink" href="$sbRoot/home/displayShow?show=$cur_item['indexerid']">$cur_item['name']</a> - $sickbeard.helpers.make_search_segment_html_string($cur_item['segment'])
</td>
</tr>
#end for
</tbody>
</table>
#end if
</div>
</div>

</div>

#include $os.path.join($sickbeard.PROG_DIR, 'gui/slick/interfaces/default/inc_bottom.tmpl')
@@ -53,7 +53,7 @@
<div class="optionWrapper">
<span class="selectTitle">Quality</span>
<div class="selectChoices">
<select id="qualityPreset" name="quality_preset" class="form-control form-control-inline input-sm">
<select id="quality-preset" name="quality_preset" class="form-control form-control-inline input-sm">
<option value="keep">< keep ></option>
<option value="0" #if None is not $quality_value and $quality_value not in $common.qualityPresets then $selected else ''#>Custom</option>
#for $curPreset in sorted($common.qualityPresets):

@@ -62,22 +62,22 @@
</select>
</div><br />

<div id="customQuality" class="show-if-quality-custom">
<div id="custom-quality" class="show-if-quality-custom">
<div class="manageCustom pull-left">
<h4 style="font-size:14px">Initial</h4>
#set $anyQualityList = filter(lambda x: x > $common.Quality.NONE, $common.Quality.qualityStrings)
<select id="anyQualities" name="anyQualities" multiple="multiple" size="$len($anyQualityList)">
<select id="initial-qualities" name="anyQualities" multiple="multiple" size="$len($anyQualityList)">
#for $curQuality in sorted($anyQualityList):
<option value="$curQuality" #if $curQuality in $anyQualities then $selected else ''#>$common.Quality.qualityStrings[$curQuality]</option>
<option value="$curQuality" #if $curQuality in $anyQualities then $selected else ''#>$common.Quality.get_quality_ui($curQuality)</option>
#end for
</select>
</div>
<div class="manageCustom pull-left">
<h4 style="font-size:14px">Upgrade to</h4>
#set $bestQualityList = filter(lambda x: x > $common.Quality.SDTV, $common.Quality.qualityStrings)
<select id="bestQualities" name="bestQualities" multiple="multiple" size="$len($bestQualityList)">
<select id="upgrade-qualities" name="bestQualities" multiple="multiple" size="$len($bestQualityList)">
#for $curQuality in sorted($bestQualityList):
<option value="$curQuality" #if $curQuality in $bestQualities then $selected else ''#>$common.Quality.qualityStrings[$curQuality]</option>
<option value="$curQuality" #if $curQuality in $bestQualities then $selected else ''#>$common.Quality.get_quality_ui($curQuality)</option>
#end for
</select>
</div><br />
@@ -3,10 +3,10 @@ $(document).ready(function(){
$('#saveDefaultsButton').click(function() {
var anyQualArray = [], bestQualArray = [];

$('#anyQualities option:selected').each(function(i, d) {
$('#initial-qualities option:selected').each(function(i, d) {
anyQualArray.push($(d).val());
});
$('#bestQualities option:selected').each(function(i, d) {
$('#upgrade-qualities option:selected').each(function(i, d) {
bestQualArray.push($(d).val());
});

@@ -32,9 +32,9 @@ $(document).ready(function(){
$(this).attr('disabled', true);
});

$('#statusSelect, #qualityPreset, #anyQualities, #bestQualities, #wanted_begin, #wanted_latest,'
$('#statusSelect, #quality-preset, #initial-qualities, #upgrade-qualities, #wanted_begin, #wanted_latest,'
+ ' #flatten_folders, #scene, #subtitles, #anime, #tag').change(function() {
$('#saveDefaultsButton').attr('disabled', false);
});

});
});
@@ -262,6 +262,11 @@ function config_success(response) {
$(this).show();
});
$('#email_show').trigger('notify');

// update footer only on the config page for the propers option
if('saveSearch' == $('#configForm').attr('action')){
getFooterTime({'change_layout': 0});
}
}

function fetch_pullrequests() {
@@ -1,33 +1,70 @@
$(document).ready(function() {
$(function(){
$('#recentsearch,#propersearch').click(function(){
$(this).addClass('disabled');
})
});
$('#forcebacklog,#forcefullbacklog').click(function(){
$('#forcebacklog,#forcefullbacklog').addClass('disabled');
$('#pausebacklog').removeClass('disabled');
})
});
$('#pausebacklog').click(function(){
$(this).addClass('disabled');
})
});
$('.show-all-less').click(function(){
$(this).nextAll('table').hide();
$(this).nextAll('input.shows-more').show();
$(this).nextAll('input.shows-less').hide();
})
});
$('.show-all-more').click(function(){
$(this).nextAll('table').show();
$(this).nextAll('input.shows-more').hide();
$(this).nextAll('input.shows-less').show();
})
});

$('.shows-less').click(function(){
$(this).nextAll('table:first').hide();
var table$ = $(this).nextAll('table:first');
table$ = table$.length ? table$ : $(this).parent().nextAll('table:first');
table$.hide();
$(this).hide();
$(this).prevAll('input:first').show();
})
});
$('.shows-more').click(function(){
$(this).nextAll('table:first').show();
var table$ = $(this).nextAll('table:first');
table$ = table$.length ? table$ : $(this).parent().nextAll('table:first');
table$.show();
$(this).hide();
$(this).nextAll('input:first').show();
})
});
});
$('.provider-retry').click(function () {
$(this).addClass('disabled');
var match = $(this).attr('id').match(/^(.+)-btn-retry$/);
$.ajax({
url: sbRoot + '/manage/manageSearches/retryProvider?provider=' + match[1],
type: 'GET',
complete: function () {
window.location.reload(true);
}
});
});

$('.provider-failures').tablesorter({widgets : ['zebra'],
headers : { 0:{sorter:!1}, 1:{sorter:!1}, 2:{sorter:!1}, 3:{sorter:!1}, 4:{sorter:!1}, 5:{sorter:!1} }
});

$('.provider-fail-parent-toggle').click(function(){
$(this).closest('tr').nextUntil('tr:not(.tablesorter-childRow)').find('td').toggle();
return !1;
});

// Make table cell focusable
// http://css-tricks.com/simple-css-row-column-highlighting/
var focus$ = $('.focus-highlight');
if (focus$.length){
focus$.find('td, th')
.attr('tabindex', '1')
// add touch device support
.on('touchstart', function(){
$(this).focus();
});
}

});
@@ -251,7 +251,7 @@ $(document).ready(function () {
$('#addShowForm').submit();
});

$('#qualityPreset').change(function () {
$('#quality-preset').change(function () {
myform.loadsection(2);
});

@@ -1,12 +1,12 @@
function setFromPresets (preset) {
var elCustomQuality = $('.show-if-quality-custom'),
selected = 'selected';
selected = 'selected', quality, selectState, btn$, dev = !1;
if (preset = parseInt(preset)) {
elCustomQuality.fadeOut('fast', 'linear');
!dev && elCustomQuality.fadeOut('fast', 'linear');

var upgrade = !0;
$('#anyQualities, #bestQualities').find('option').each(function() {
if (upgrade && 'bestQualities' === $(this).parent().attr('id')) {
$('#initial-qualities, #upgrade-qualities').find('option').each(function() {
if (upgrade && 'upgrade-qualities' === $(this).parent().attr('id')) {
upgrade = !1;
switch (preset) {
case 3: preset = 128 + 32 + 4; break;

@@ -15,8 +15,22 @@ function setFromPresets (preset) {
default: preset = 0;
}
}
$(this).attr(selected, ((preset & parseInt($(this).val())) ? selected : false));

quality = $(this).val();
selectState = ((preset & parseInt(quality, 10)) ? selected : !1);
$(this).attr(selected, selectState);

var list = /initial/.test($(this).parent().attr('id')) ? '#initial-quality': '#upgrade-quality';
btn$ = $(/initial/.test($(this).parent().attr('id')) ? '#initial-quality': '#upgrade-quality').find('a.btn[data-quality="' + quality + '"]');
if(!selectState){
btn$.removeClass('active')

} else {
btn$.addClass('active')
}
dev && console.log(preset, list, 'this.val():', quality, 'selectState:', selectState, 'hasClass:', btn$.hasClass('active'))
});
dev && console.log('-----------------------');
} else
elCustomQuality.fadeIn('fast', 'linear');

@@ -24,16 +38,23 @@ function setFromPresets (preset) {
}

function presentTips() {
var tip$ = $('#unknown');
if (/undefined/i.test($('#anyQualities').find('option[value="32768"]').attr('selected'))) {
tip$.fadeOut('fast', 'linear');
} else {
var tip$ = $('#unknown-quality');
if ($('#initial-quality').find('a.btn[data-quality="32768"]').hasClass('active')) {
tip$.fadeIn('fast', 'linear');
} else {
tip$.fadeOut('fast', 'linear');
}

var tip$ = $('#no-upgrade'), tip2$ = $('#upgrade-cond');
if ($('#upgrade-quality').find('a.btn').hasClass('active')) {
tip$.fadeOut('fast', 'linear', function(){tip2$.fadeIn('fast', 'linear');});
} else {
tip2$.fadeOut('fast', 'linear', function(){tip$.fadeIn('fast', 'linear');});
}
}

$(document).ready(function() {
var elQualityPreset = $('#qualityPreset'),
$(function() {
var elQualityPreset = $('#quality-preset'),
selected = ':selected';

elQualityPreset.change(function() {

@@ -42,7 +63,29 @@ $(document).ready(function() {

setFromPresets(elQualityPreset.find(selected).val());

$('#anyQualities').change(function() {
$('#initial-qualities').change(function() {
presentTips();
});

$('#custom-quality').find('a[href="#"].btn').on('click', function(event){
event.stopPropagation();

$(this).toggleClass('active');

var select$ = $('initial-quality' === $(this).closest('.component-desc').attr('id') ? '#initial-qualities' : '#upgrade-qualities'),
quality = $(this).data('quality'), arrSelected = $.map(select$.val(), function(v){return parseInt(v, 10)}) || Array();

if($(this).hasClass('active')){
arrSelected.push(quality);
} else {
arrSelected = arrSelected.filter(function(elem){
return elem !== quality;
});
}

select$.val(arrSelected).change();

presentTips();
return !1;
});
});
@@ -37,7 +37,7 @@ sys.path.insert(1, os.path.abspath('../lib'))
from sickbeard import helpers, encodingKludge as ek
from sickbeard import db, image_cache, logger, naming, metadata, providers, scene_exceptions, scene_numbering, \
scheduler, auto_post_processer, search_queue, search_propers, search_recent, search_backlog, \
show_queue, show_updater, subtitles, traktChecker, version_checker, indexermapper, classes
show_queue, show_updater, subtitles, traktChecker, version_checker, indexermapper, classes, properFinder
from sickbeard.config import CheckSection, check_setting_int, check_setting_str, ConfigMigrator, minimax
from sickbeard.common import SD, SKIPPED
from sickbeard.databases import mainDB, cache_db, failed_db

@@ -153,6 +153,7 @@ ROOT_DIRS = None
TRASH_REMOVE_SHOW = False
TRASH_ROTATE_LOGS = False
HOME_SEARCH_FOCUS = True
DISPLAY_FREESPACE = True
SORT_ARTICLE = False
DEBUG = False
SHOW_TAGS = []

@@ -209,8 +210,8 @@ USENET_RETENTION = None
TORRENT_METHOD = None
TORRENT_DIR = None
DOWNLOAD_PROPERS = False
CHECK_PROPERS_INTERVAL = None
PROPERS_WEBDL_ONEGRP = True
WEBDL_TYPES = []
ALLOW_HIGH_PRIORITY = False
NEWZNAB_DATA = ''

@@ -462,6 +463,7 @@ FANART_LIMIT = None
FANART_PANEL = None
FANART_RATINGS = {}
HOME_LAYOUT = None
FOOTER_TIME_LAYOUT = 0
POSTER_SORTBY = None
POSTER_SORTDIR = None
DISPLAY_SHOW_VIEWMODE = 0

@@ -573,7 +575,7 @@ def initialize(console_logging=True):
# Post processing
global KEEP_PROCESSED_DIR
# Views
global GUI_NAME, HOME_LAYOUT, POSTER_SORTBY, POSTER_SORTDIR, DISPLAY_SHOW_SPECIALS, \
global GUI_NAME, HOME_LAYOUT, FOOTER_TIME_LAYOUT, POSTER_SORTBY, POSTER_SORTDIR, DISPLAY_SHOW_SPECIALS, \
EPISODE_VIEW_LAYOUT, EPISODE_VIEW_SORT, EPISODE_VIEW_DISPLAY_PAUSED, \
EPISODE_VIEW_MISSED_RANGE, EPISODE_VIEW_POSTERS, FANART_PANEL, FANART_RATINGS, \
EPISODE_VIEW_VIEWMODE, EPISODE_VIEW_BACKGROUND, EPISODE_VIEW_BACKGROUND_TRANSLUCENT, \

@@ -585,7 +587,7 @@ def initialize(console_logging=True):
VERSION_NOTIFY, AUTO_UPDATE, UPDATE_FREQUENCY, NOTIFY_ON_UPDATE
# Gen Config/Interface
global THEME_NAME, DEFAULT_HOME, FANART_LIMIT, SHOWLIST_TAGVIEW, SHOW_TAGS, \
HOME_SEARCH_FOCUS, USE_IMDB_INFO, IMDB_ACCOUNTS, SORT_ARTICLE, FUZZY_DATING, TRIM_ZERO, \
HOME_SEARCH_FOCUS, USE_IMDB_INFO, IMDB_ACCOUNTS, DISPLAY_FREESPACE, SORT_ARTICLE, FUZZY_DATING, TRIM_ZERO, \
DATE_PRESET, TIME_PRESET, TIME_PRESET_W_SECONDS, TIMEZONE_DISPLAY, \
WEB_USERNAME, WEB_PASSWORD, CALENDAR_UNPROTECTED, USE_API, API_KEY, WEB_PORT, WEB_LOG, \
ENABLE_HTTPS, HTTPS_CERT, HTTPS_KEY, WEB_IPV6, WEB_IPV64, HANDLE_REVERSE_PROXY, SEND_SECURITY_HEADERS

@@ -593,7 +595,7 @@ def initialize(console_logging=True):
global BRANCH, CUR_COMMIT_BRANCH, GIT_REMOTE, CUR_COMMIT_HASH, GIT_PATH, CPU_PRESET, ANON_REDIRECT, \
ENCRYPTION_VERSION, PROXY_SETTING, PROXY_INDEXERS, FILE_LOGGING_PRESET
# Search Settings/Episode
global DOWNLOAD_PROPERS, PROPERS_WEBDL_ONEGRP, CHECK_PROPERS_INTERVAL, RECENTSEARCH_FREQUENCY, \
global DOWNLOAD_PROPERS, PROPERS_WEBDL_ONEGRP, WEBDL_TYPES, RECENTSEARCH_FREQUENCY, \
BACKLOG_DAYS, BACKLOG_NOFULL, BACKLOG_FREQUENCY, USENET_RETENTION, IGNORE_WORDS, REQUIRE_WORDS, \
ALLOW_HIGH_PRIORITY, SEARCH_UNAIRED, UNAIRED_RECENT_SEARCH_ONLY
# Search Settings/NZB search

@@ -727,6 +729,7 @@ def initialize(console_logging=True):
USE_IMDB_INFO = bool(check_setting_int(CFG, 'GUI', 'use_imdb_info', 1))
IMDB_ACCOUNTS = CFG.get('GUI', []).get('imdb_accounts', [IMDB_DEFAULT_LIST_ID, IMDB_DEFAULT_LIST_NAME])
HOME_SEARCH_FOCUS = bool(check_setting_int(CFG, 'General', 'home_search_focus', HOME_SEARCH_FOCUS))
DISPLAY_FREESPACE = bool(check_setting_int(CFG, 'General', 'display_freespace', 1))
SORT_ARTICLE = bool(check_setting_int(CFG, 'General', 'sort_article', 0))
FUZZY_DATING = bool(check_setting_int(CFG, 'GUI', 'fuzzy_dating', 0))
TRIM_ZERO = bool(check_setting_int(CFG, 'GUI', 'trim_zero', 0))

@@ -843,9 +846,6 @@ def initialize(console_logging=True):

DOWNLOAD_PROPERS = bool(check_setting_int(CFG, 'General', 'download_propers', 1))
PROPERS_WEBDL_ONEGRP = bool(check_setting_int(CFG, 'General', 'propers_webdl_onegrp', 1))
CHECK_PROPERS_INTERVAL = check_setting_str(CFG, 'General', 'check_propers_interval', '')
if CHECK_PROPERS_INTERVAL not in ('15m', '45m', '90m', '4h', 'daily'):
CHECK_PROPERS_INTERVAL = 'daily'

ALLOW_HIGH_PRIORITY = bool(check_setting_int(CFG, 'General', 'allow_high_priority', 1))

@@ -1151,6 +1151,7 @@ def initialize(console_logging=True):
METADATA_KODI = check_setting_str(CFG, 'General', 'metadata_kodi', '0|0|0|0|0|0|0|0|0|0')

HOME_LAYOUT = check_setting_str(CFG, 'GUI', 'home_layout', 'poster')
FOOTER_TIME_LAYOUT = check_setting_int(CFG, 'GUI', 'footer_time_layout', 0)
POSTER_SORTBY = check_setting_str(CFG, 'GUI', 'poster_sortby', 'name')
POSTER_SORTDIR = check_setting_int(CFG, 'GUI', 'poster_sortdir', 1)
DISPLAY_SHOW_VIEWMODE = check_setting_int(CFG, 'GUI', 'display_show_viewmode', 0)

@@ -1371,19 +1372,17 @@ def initialize(console_logging=True):
prevent_cycle_run=searchQueueScheduler.action.is_standard_backlog_in_progress)

propers_searcher = search_propers.ProperSearcher()
item = [(k, n, v) for (k, n, v) in propers_searcher.search_intervals if k == CHECK_PROPERS_INTERVAL]
if item:
update_interval = datetime.timedelta(minutes=item[0][2])
run_at = None
last_proper_search = datetime.datetime.fromtimestamp(properFinder.get_last_proper_search())
time_diff = datetime.timedelta(days=1) - (datetime.datetime.now() - last_proper_search)
if time_diff < datetime.timedelta(seconds=0):
properdelay = 20
else:
update_interval = datetime.timedelta(hours=1)
run_at = datetime.time(hour=1) # 1 AM
properdelay = helpers.tryInt((time_diff.total_seconds() / 60) + 5, 20)

properFinderScheduler = scheduler.Scheduler(
propers_searcher,
cycleTime=update_interval,
run_delay=update_interval,
start_time=run_at,
cycleTime=datetime.timedelta(days=1),
run_delay=datetime.timedelta(minutes=properdelay),
threadName='FINDPROPERS',
prevent_cycle_run=searchQueueScheduler.action.is_propersearch_in_progress)
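
With CHECK_PROPERS_INTERVAL gone, the proper finder now runs on a fixed daily cycle and the only variable is the start-up delay computed above. A minimal sketch of that arithmetic, with a hypothetical `proper_run_delay_minutes()` standing in for the inline code (the real code uses `helpers.tryInt` and `properFinder.get_last_proper_search()`):

```python
import datetime

def proper_run_delay_minutes(last_proper_search, now=None):
    # aim the first run at ~one day after the last proper search;
    # if that moment has already passed, use a short 20 minute delay
    now = now or datetime.datetime.now()
    time_diff = datetime.timedelta(days=1) - (now - last_proper_search)
    if time_diff < datetime.timedelta(seconds=0):
        return 20
    return int(time_diff.total_seconds() / 60) + 5  # +5 minutes of slack

# a search done 23h ago yields ~65 minutes; one done two days ago yields 20
```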

@@ -1416,10 +1415,8 @@ def enabled_schedulers(is_init=False):
# ([], [traktCheckerScheduler])[USE_TRAKT] + \
for s in ([], [events])[is_init] + \
[recentSearchScheduler, backlogSearchScheduler, showUpdateScheduler,
versionCheckScheduler, showQueueScheduler, searchQueueScheduler] + \
([], [properFinderScheduler])[DOWNLOAD_PROPERS] + \
([], [autoPostProcesserScheduler])[PROCESS_AUTOMATICALLY] + \
([], [subtitlesFinderScheduler])[USE_SUBTITLES] + \
versionCheckScheduler, showQueueScheduler, searchQueueScheduler, properFinderScheduler,
autoPostProcesserScheduler, subtitlesFinderScheduler] + \
([events], [])[is_init]:
yield s

@@ -1503,7 +1500,7 @@ def halt():
logger.log('Fail, thread %s did not exit' % ADBA_CONNECTION.name)

for thread in enabled_schedulers():
thread.stop.set()
thread.stop()

for thread in enabled_schedulers():
try:

@@ -1577,7 +1574,6 @@ def save_config():
new_config['General']['update_frequency'] = int(UPDATE_FREQUENCY)
new_config['General']['download_propers'] = int(DOWNLOAD_PROPERS)
new_config['General']['propers_webdl_onegrp'] = int(PROPERS_WEBDL_ONEGRP)
new_config['General']['check_propers_interval'] = CHECK_PROPERS_INTERVAL
new_config['General']['allow_high_priority'] = int(ALLOW_HIGH_PRIORITY)
new_config['General']['recentsearch_startup'] = int(RECENTSEARCH_STARTUP)
new_config['General']['backlog_nofull'] = int(BACKLOG_NOFULL)

@@ -1615,6 +1611,7 @@ def save_config():
new_config['General']['trash_remove_show'] = int(TRASH_REMOVE_SHOW)
new_config['General']['trash_rotate_logs'] = int(TRASH_ROTATE_LOGS)
new_config['General']['home_search_focus'] = int(HOME_SEARCH_FOCUS)
new_config['General']['display_freespace'] = int(DISPLAY_FREESPACE)
new_config['General']['sort_article'] = int(SORT_ARTICLE)
new_config['General']['proxy_setting'] = PROXY_SETTING
new_config['General']['proxy_indexers'] = int(PROXY_INDEXERS)

@@ -1950,6 +1947,7 @@ def save_config():
new_config['GUI']['showlist_tagview'] = SHOWLIST_TAGVIEW

new_config['GUI']['home_layout'] = HOME_LAYOUT
new_config['GUI']['footer_time_layout'] = FOOTER_TIME_LAYOUT
new_config['GUI']['poster_sortby'] = POSTER_SORTBY
new_config['GUI']['poster_sortdir'] = POSTER_SORTDIR
@@ -27,6 +27,12 @@ class PostProcesser():
def __init__(self):
self.amActive = False

@staticmethod
def check_paused():
if sickbeard.PROCESS_AUTOMATICALLY:
return False
return True

def run(self):
if not sickbeard.PROCESS_AUTOMATICALLY:
return
@@ -151,6 +151,10 @@ class Quality:
return (Quality.qualityStrings[quality].replace('2160p', 'UHD2160p').replace('1080p', 'HD1080p')
.replace('720p', 'HD720p').replace('HD TV', 'HD720p').replace('RawHD TV', 'RawHD'))

@staticmethod
def get_quality_ui(quality):
return Quality.qualityStrings[quality].replace('SD DVD', 'SD DVD/BR/BD')

@staticmethod
def _getStatusStrings(status):
toReturn = {}

@@ -546,6 +550,24 @@ class neededQualities(object):
if isinstance(v, bool) and True is v:
self.need_sd = self.need_hd = self.need_uhd = self.need_webdl = True

def all_show_qualities_needed(self, show):
from sickbeard.tv import TVShow
if isinstance(show, TVShow):
init, upgrade = Quality.splitQuality(show.quality)
all_qual = set(init + upgrade)
need_sd = need_hd = need_uhd = need_webdl = False
for wanted_qualities in all_qual:
if not need_sd and wanted_qualities <= neededQualities.max_sd:
need_sd = True
if not need_hd and wanted_qualities in neededQualities.hd_qualities:
need_hd = True
if not need_webdl and wanted_qualities in neededQualities.webdl_qualities:
need_webdl = True
if not need_uhd and wanted_qualities > neededQualities.max_hd:
need_uhd = True
return self.need_sd == need_sd and self.need_hd == need_hd and self.need_webdl == need_webdl and \
self.need_uhd == need_uhd

def check_needed_types(self, show):
if getattr(show, 'is_anime', False):
self.need_anime = True
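
The new `all_show_qualities_needed()` lets a search skip work when a show's configured qualities already match the need_sd/hd/webdl/uhd flags collected so far. The bucketing idea in isolation, using invented threshold constants rather than the real `Quality` values:

```python
# illustrative constants only; the real code reads neededQualities.max_sd,
# .hd_qualities, .webdl_qualities and .max_hd from sickbeard.common
MAX_SD, HD_QUALITIES, WEBDL_QUALITIES, MAX_HD = 3, {4, 8, 32}, {8, 64}, 128

def classify(qualities):
    need = {'sd': False, 'hd': False, 'webdl': False, 'uhd': False}
    for q in set(qualities):
        need['sd'] |= q <= MAX_SD
        need['hd'] |= q in HD_QUALITIES
        need['webdl'] |= q in WEBDL_QUALITIES
        need['uhd'] |= q > MAX_HD
    return need

print(classify([2, 8, 256]))  # every bucket flagged: sd, hd, webdl and uhd
```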

@@ -197,15 +197,7 @@ def change_DOWNLOAD_PROPERS(download_propers):
return

sickbeard.DOWNLOAD_PROPERS = download_propers
if sickbeard.DOWNLOAD_PROPERS:
sickbeard.properFinderScheduler.start()
else:
sickbeard.properFinderScheduler.stop.set()
logger.log(u'Waiting for the PROPERFINDER thread to exit')
try:
sickbeard.properFinderScheduler.join(10)
except:
pass
sickbeard.properFinderScheduler.check_paused()


def change_USE_TRAKT(use_trakt):

@@ -216,7 +208,7 @@ def change_USE_TRAKT(use_trakt):
# if sickbeard.USE_TRAKT:
# sickbeard.traktCheckerScheduler.start()
# else:
# sickbeard.traktCheckerScheduler.stop.set()
# sickbeard.traktCheckerScheduler.stop()
# logger.log(u'Waiting for the TRAKTCHECKER thread to exit')
# try:
# sickbeard.traktCheckerScheduler.join(10)

@@ -229,21 +221,7 @@ def change_USE_SUBTITLES(use_subtitles):
return

sickbeard.USE_SUBTITLES = use_subtitles
if sickbeard.USE_SUBTITLES and not sickbeard.subtitlesFinderScheduler.isAlive():
sickbeard.subtitlesFinderScheduler = sickbeard.scheduler.Scheduler(
sickbeard.subtitles.SubtitlesFinder(),
cycleTime=datetime.timedelta(hours=sickbeard.SUBTITLES_FINDER_FREQUENCY),
threadName='FINDSUBTITLES', silent=False)
sickbeard.subtitlesFinderScheduler.start()
else:
sickbeard.subtitlesFinderScheduler.stop.set()
sickbeard.subtitlesFinderScheduler.silent = True
threadname = sickbeard.subtitlesFinderScheduler.name
try:
sickbeard.subtitlesFinderScheduler.join(10)
logger.log('Thread %s has exit' % threadname)
except RuntimeError:
logger.log('Fail, thread %s did not exit' % threadname)
sickbeard.subtitlesFinderScheduler.check_paused()


def CheckSection(CFG, sec):
@@ -17,91 +17,90 @@
# along with SickGear.  If not, see <http://www.gnu.org/licenses/>.

from sickbeard import db
from collections import OrderedDict
import re

MIN_DB_VERSION = 1
MAX_DB_VERSION = 3
MAX_DB_VERSION = 4
TEST_BASE_VERSION = None # the base production db version, only needed for TEST db versions (>=100000)


# Add new migrations at the bottom of the list; subclass the previous migration.
class InitialSchema(db.SchemaUpgrade):
def __init__(self, connection):
super(InitialSchema, self).__init__(connection)

self.queries = OrderedDict([
('base', [
'CREATE TABLE lastUpdate(provider TEXT, time NUMERIC)',
'CREATE TABLE lastSearch(provider TEXT, time NUMERIC)',
'CREATE TABLE db_version(db_version INTEGER)',
'INSERT INTO db_version(db_version) VALUES (1)',
'CREATE TABLE network_timezones(network_name TEXT PRIMARY KEY, timezone TEXT)'
]),
('consolidate_providers', [
'CREATE TABLE provider_cache(provider TEXT, name TEXT, season NUMERIC, episodes TEXT,'
' indexerid NUMERIC, url TEXT UNIQUE, time NUMERIC, quality TEXT, release_group TEXT, version NUMERIC)',
'CREATE TABLE network_conversions('
'tvdb_network TEXT PRIMARY KEY, tvrage_network TEXT, tvrage_country TEXT)',
'CREATE INDEX tvrage_idx ON network_conversions(tvrage_network, tvrage_country)'
]),
('add_backlogparts', [
'CREATE TABLE backlogparts('
'part NUMERIC NOT NULL, indexer NUMERIC NOT NULL, indexerid NUMERIC NOT NULL)',
'CREATE TABLE lastrecentsearch(name TEXT PRIMARY KEY NOT NULL, datetime NUMERIC NOT NULL)'
]),
('add_provider_fails', [
'CREATE TABLE provider_fails(prov_name TEXT, fail_type INTEGER, fail_code INTEGER, fail_time NUMERIC)',
'CREATE INDEX idx_prov_name_error ON provider_fails (prov_name)',
'CREATE UNIQUE INDEX idx_prov_errors ON provider_fails (prov_name, fail_time)',
'CREATE TABLE provider_fails_count(prov_name TEXT PRIMARY KEY,'
' failure_count NUMERIC, failure_time NUMERIC,'
' tmr_limit_count NUMERIC, tmr_limit_time NUMERIC, tmr_limit_wait NUMERIC)'
])
])

def test(self):
return self.hasTable('lastUpdate')

def execute(self):
queries = [
'CREATE TABLE lastUpdate (provider TEXT, time NUMERIC)',
'CREATE TABLE lastSearch (provider TEXT, time NUMERIC)',
'CREATE TABLE db_version (db_version INTEGER)',
'INSERT INTO db_version (db_version) VALUES (1)',
'CREATE TABLE network_timezones (network_name TEXT PRIMARY KEY, timezone TEXT)',
'CREATE TABLE network_conversions ('
'tvdb_network TEXT PRIMARY KEY, tvrage_network TEXT, tvrage_country TEXT)',
'CREATE INDEX tvrage_idx on network_conversions (tvrage_network, tvrage_country)',
'CREATE TABLE provider_cache (provider TEXT ,name TEXT, season NUMERIC, episodes TEXT,'
' indexerid NUMERIC, url TEXT UNIQUE, time NUMERIC, quality TEXT, release_group TEXT, '
'version NUMERIC)',
'CREATE TABLE IF NOT EXISTS "backlogparts" ("part" NUMERIC NOT NULL ,'
' "indexer" NUMERIC NOT NULL , "indexerid" NUMERIC NOT NULL )',
'CREATE TABLE IF NOT EXISTS "lastrecentsearch" ("name" TEXT PRIMARY KEY NOT NULL'
' , "datetime" NUMERIC NOT NULL )',
]
for query in queries:
self.connection.action(query)
self.setDBVersion(3)
self.do_query(self.queries.values())
self.setDBVersion(MAX_DB_VERSION)

def backup(self):
db.backup_database('cache.db', self.checkDBVersion())


class ConsolidateProviders(InitialSchema):
def test(self):
return self.checkDBVersion() > 1
return 1 < self.checkDBVersion()

def execute(self):

db.backup_database('cache.db', self.checkDBVersion())
if self.hasTable('provider_cache'):
self.connection.action('DROP TABLE provider_cache')

self.connection.action('CREATE TABLE provider_cache (provider TEXT, name TEXT, season NUMERIC, episodes TEXT, '
'indexerid NUMERIC, url TEXT UNIQUE, time NUMERIC, quality TEXT, release_group TEXT, '
'version NUMERIC)')

if not self.hasTable('network_conversions'):
self.connection.action('CREATE TABLE network_conversions ' +
'(tvdb_network TEXT PRIMARY KEY, tvrage_network TEXT, tvrage_country TEXT)')
self.connection.action('CREATE INDEX tvrage_idx ' +
'on network_conversions (tvrage_network, tvrage_country)')

keep_tables = set(['lastUpdate', 'lastSearch', 'db_version',
'network_timezones', 'network_conversions', 'provider_cache'])
current_tables = set(self.listTables())
remove_tables = list(current_tables - keep_tables)
for table in remove_tables:
self.connection.action('DROP TABLE [%s]' % table)

self.incDBVersion()
self.backup()
keep_tables = {'lastUpdate', 'lastSearch', 'db_version',
'network_timezones', 'network_conversions', 'provider_cache'}
# old provider_cache is dropped before re-creation
self.do_query(['DROP TABLE [provider_cache]'] + self.queries['consolidate_providers'] +
['DROP TABLE [%s]' % t for t in (set(self.listTables()) - keep_tables)])
self.finish(True)


class AddBacklogParts(ConsolidateProviders):
def test(self):
return self.checkDBVersion() > 2
return 2 < self.checkDBVersion()

def execute(self):
self.backup()
self.do_query(self.queries['add_backlogparts'] +
['DROP TABLE [%s]' % t for t in ('scene_names', 'scene_exceptions_refresh', 'scene_exceptions')])
self.finish(True)

db.backup_database('cache.db', self.checkDBVersion())
if self.hasTable('scene_names'):
self.connection.action('DROP TABLE scene_names')

if not self.hasTable('backlogparts'):
self.connection.action('CREATE TABLE IF NOT EXISTS "backlogparts" ("part" NUMERIC NOT NULL ,'
' "indexer" NUMERIC NOT NULL , "indexerid" NUMERIC NOT NULL )')
class AddProviderFailureHandling(AddBacklogParts):
def test(self):
return 3 < self.checkDBVersion()

if not self.hasTable('lastrecentsearch'):
self.connection.action('CREATE TABLE IF NOT EXISTS "lastrecentsearch" ("name" TEXT PRIMARY KEY NOT NULL'
' , "datetime" NUMERIC NOT NULL )')

if self.hasTable('scene_exceptions_refresh'):
self.connection.action('DROP TABLE scene_exceptions_refresh')
if self.hasTable('scene_exceptions'):
self.connection.action('DROP TABLE scene_exceptions')
self.connection.action('VACUUM')

self.incDBVersion()
def execute(self):
self.backup()
self.do_query(self.queries['add_provider_fails'])
self.finish()
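
These cache.db migrations follow a subclass chain: each step inherits the previous one, `test()` reports whether the step is already applied, and `execute()` replays only its own named query group via `do_query()`. A toy model of that chain (an in-memory version number replaces the real SQLite `db_version` table):

```python
class Upgrade(object):
    version = 0  # stands in for the db_version table

    def run(self):
        if not self.test():
            self.execute()

class InitialSchema(Upgrade):
    def test(self):
        return 1 <= Upgrade.version

    def execute(self):
        print('create base tables')
        Upgrade.version = 1

class AddProviderFailureHandling(InitialSchema):
    def test(self):
        return 4 <= Upgrade.version  # mirrors `return 3 < self.checkDBVersion()`

    def execute(self):
        print('create provider_fails tables')
        Upgrade.version = 4

for step in (InitialSchema(), AddProviderFailureHandling()):
    step.run()  # fresh installs run both; up-to-date ones run neither
```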

@@ -21,6 +21,7 @@ from sickbeard.common import Quality

MIN_DB_VERSION = 1
MAX_DB_VERSION = 1
TEST_BASE_VERSION = None # the base production db version, only needed for TEST db versions (>=100000)

# Add new migrations at the bottom of the list; subclass the previous migration.
class InitialSchema(db.SchemaUpgrade):
@@ -27,7 +27,8 @@ from sickbeard import encodingKludge as ek
from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException

MIN_DB_VERSION = 9 # oldest db version we support migrating from
MAX_DB_VERSION = 20006
MAX_DB_VERSION = 20008
TEST_BASE_VERSION = None # the base production db version, only needed for TEST db versions (>=100000)


class MainSanityCheck(db.DBSanityCheck):

@@ -1258,3 +1259,23 @@ class AddFlagTable(db.SchemaUpgrade):

self.setDBVersion(20006)
return self.checkDBVersion()


# 20006 -> 20007
class DBIncreaseTo20007(db.SchemaUpgrade):
def execute(self):

logger.log(u'Bumping database version')

self.setDBVersion(20007)
return self.checkDBVersion()


# 20007 -> 20008
class AddWebdlTypesTable(db.SchemaUpgrade):
def execute(self):
db.backup_database('sickbeard.db', self.checkDBVersion())
self.connection.action('CREATE TABLE webdl_types (dname TEXT NOT NULL , regex TEXT NOT NULL )')

self.setDBVersion(20008)
return self.checkDBVersion()
@@ -432,6 +432,26 @@ class SchemaUpgrade(object):
tables.append(table[0])
return tables

def do_query(self, queries):
if not isinstance(queries, list):
queries = list(queries)
elif isinstance(queries[0], list):
queries = [item for sublist in queries for item in sublist]

for query in queries:
tbl_name = re.findall('(?i)DROP.*?TABLE.*?\[?([^\s\]]+)', query)
if tbl_name and not self.hasTable(tbl_name[0]):
continue
tbl_name = re.findall('(?i)CREATE.*?TABLE.*?\s([^\s(]+)\s*\(', query)
if tbl_name and self.hasTable(tbl_name[0]):
continue
self.connection.action(query)

def finish(self, tbl_dropped=False):
if tbl_dropped:
self.connection.action('VACUUM')
self.incDBVersion()


def MigrationCode(myDB):
schema = {

@@ -492,7 +512,9 @@ def MigrationCode(myDB):
20002: sickbeard.mainDB.AddTvShowTags,
20003: sickbeard.mainDB.ChangeMapIndexer,
20004: sickbeard.mainDB.AddShowNotFoundCounter,
20005: sickbeard.mainDB.AddFlagTable
20005: sickbeard.mainDB.AddFlagTable,
20006: sickbeard.mainDB.DBIncreaseTo20007,
20007: sickbeard.mainDB.AddWebdlTypesTable,
# 20002: sickbeard.mainDB.AddCoolSickGearFeature3,
}
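
`do_query()` is what makes the rewritten migrations safe to re-run: it flattens nested query lists, then uses two regexes to skip a `DROP TABLE` whose table is absent and a `CREATE TABLE` whose table already exists. A hedged standalone rendering of that guard (the `existing_tables` set plays the role of `hasTable()`):

```python
import re

def should_run(query, existing_tables):
    # skip DROP statements for tables that do not exist...
    tbl = re.findall(r'(?i)DROP.*?TABLE.*?\[?([^\s\]]+)', query)
    if tbl and tbl[0] not in existing_tables:
        return False
    # ...and CREATE statements for tables that already do
    tbl = re.findall(r'(?i)CREATE.*?TABLE.*?\s([^\s(]+)\s*\(', query)
    if tbl and tbl[0] in existing_tables:
        return False
    return True

tables = {'provider_cache'}
print(should_run('DROP TABLE [provider_cache]', tables))                 # True
print(should_run('CREATE TABLE provider_cache(provider TEXT)', tables))  # False
```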

@@ -2,44 +2,49 @@ from lib.six import moves

import threading


class Event:
def __init__(self, type):
self._type = type
def __init__(self, etype):
self._type = etype

@property
def type(self):
return self._type


class Events(threading.Thread):
def __init__(self, callback):
super(Events, self).__init__()
self.queue = moves.queue.Queue()
self.daemon = True
self.callback = callback
self.name = "EVENT-QUEUE"
self.stop = threading.Event()
self.name = 'EVENT-QUEUE'
self._stop = threading.Event()

def put(self, type):
self.queue.put(type)
def put(self, etype):
self.queue.put(etype)

def stop(self):
self._stop.set()

def run(self):
while (not self.stop.is_set()):
while not self._stop.is_set():
try:
# get event type
type = self.queue.get(True, 1)
etype = self.queue.get(True, 1)

# perform callback if we got a event type
self.callback(type)
self.callback(etype)

# event completed
self.queue.task_done()
except moves.queue.Empty:
type = None
pass

# exiting thread
self.stop.clear()
self._stop.clear()

# System Events
class SystemEvent(Event):
RESTART = "RESTART"
SHUTDOWN = "SHUTDOWN"
RESTART = 'RESTART'
SHUTDOWN = 'SHUTDOWN'
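
The event-queue rework frees the name `stop` so it can be a method (`halt()` above now calls `thread.stop()` instead of poking `thread.stop.set()`), and renames the `type` parameter to `etype` to stop shadowing the built-in. A usage sketch, assuming the module path `sickbeard.event_queue` for the classes shown:

```python
from sickbeard.event_queue import Events, SystemEvent  # assumed module path

received = []
events = Events(received.append)  # the callback receives each event type
events.start()
events.put(SystemEvent.RESTART)   # worker thread pops it and calls the callback
events.stop()                     # now a method that sets the private _stop flag
```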

@@ -275,7 +275,7 @@ def searchIndexerForShowID(regShowName, indexer=None, indexer_id=None, ui=None):


def sizeof_fmt(num):
for x in ['bytes', 'KB', 'MB', 'GB', 'TB']:
for x in ['bytes', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB']:
if num < 1024.0:
return "%3.1f %s" % (num, x)
num /= 1024.0
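
Each loop pass divides by 1024 until the value drops under the next unit step, so extending the unit list means very large byte counts no longer walk off the end of it (the old version implicitly returned `None` past TB). For instance:

```python
# sizeof_fmt(1234)           -> '1.2 KB'
# sizeof_fmt(3 * 1024 ** 4)  -> '3.0 TB'
# sizeof_fmt(2 * 1024 ** 5)  -> '2.0 PB'  (fell off the list before this change)
```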

@@ -1556,3 +1556,62 @@ def datetime_to_epoch(dt):
dt = dt.replace(tzinfo=sb_timezone)
utc_naive = dt.replace(tzinfo=None) - dt.utcoffset()
return int((utc_naive - datetime.datetime(1970, 1, 1)).total_seconds())


def df():
"""
Return disk free space at known parent locations

:return: string path, string value that is formatted size
:rtype: list of tuples
"""
result = []
min_output = True
if sickbeard.ROOT_DIRS and sickbeard.DISPLAY_FREESPACE:
targets = []
for path in sickbeard.ROOT_DIRS.split('|')[1:]:
location_parts = os.path.splitdrive(path)
target = location_parts[0]
if 'win32' == sys.platform:
if not re.match('(?i)[a-z]:(?:\\\\)?$', target):
# simple drive letter not found, fallback to full path
target = path
min_output = False
elif sys.platform.startswith(('linux', 'darwin', 'sunos5')) or 'bsd' in sys.platform:
target = path
min_output = False
if target and target not in targets:
targets += [target]
free = freespace(path)
if None is not free:
result += [(target, sizeof_fmt(free).replace(' ', ''))]
return result, min_output


def freespace(path=None):
"""
Return free space available at path location

:param path: Example paths (Windows) = '\\\\192.168.0.1\\sharename\\existing_path', 'd:\\existing_path'
Untested with mount points under linux
:type path: basestring
:return: Size in bytes
:rtype: long
"""
result = None

if 'win32' == sys.platform:
try:
import ctypes
if None is not ctypes:
max_val = (2 ** 64) - 1
storage = ctypes.c_ulonglong(max_val)
ctypes.windll.kernel32.GetDiskFreeSpaceExW(ctypes.c_wchar_p(path), None, None, ctypes.pointer(storage))
result = (storage.value, None)[max_val == storage.value]
except(StandardError, Exception):
pass
elif sys.platform.startswith(('linux', 'darwin', 'sunos5')) or 'bsd' in sys.platform:
storage = os.statvfs(path)
result = storage.f_bavail * storage.f_frsize

return result
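
`df()` feeds each configured root folder to `freespace()`: on Windows that is `GetDiskFreeSpaceExW` through ctypes, elsewhere `os.statvfs`, where `f_bavail * f_frsize` is the space available to unprivileged processes. The POSIX branch in isolation:

```python
import os

def posix_free_bytes(path):
    # free space available to non-root users, as in the helper above
    st = os.statvfs(path)
    return st.f_bavail * st.f_frsize

print(posix_free_bytes('/'))  # byte count, e.g. 52613349376
```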

@@ -113,7 +113,7 @@ class NameParser(object):
return

matches = []

initial_best_result = None
for reg_ex in self.compiled_regexes:
for (cur_regex_num, cur_regex_name, cur_regex) in self.compiled_regexes[reg_ex]:
new_name = helpers.remove_non_release_groups(name, 'anime' in cur_regex_name)

@@ -266,6 +266,13 @@ class NameParser(object):

# if this is a naming pattern test then return best result
if not show or self.naming_pattern:
if not show and not self.naming_pattern and not self.testing:
# ensure anime regex test but use initial best if show still not found
if 0 == reg_ex:
initial_best_result = best_result
matches = [] # clear non-anime match scores
continue
return initial_best_result
return best_result

# get quality
@@ -302,15 +302,15 @@ anime_regexes = [
# Bleach s16e03e04 313-314
'''
^(\[(?P<release_group>.+?)\][ ._-]*)?
(?P<series_name>.+?)[ ._-]+ # start of string and series name and non optinal separator
(?P<series_name>.+?)[ ._-]+ # start of string and series name and non optional separator
[sS](?P<season_num>\d+)[. _-]* # S01 and optional separator
[eE](?P<ep_num>\d+) # epipisode E02
[eE](?P<ep_num>\d+) # episode E02
(([. _-]*e|-) # linking e/- char
(?P<extra_ep_num>\d+))* # additional E03/etc
([ ._-]{2,}|[ ._]+) # if "-" is used to separate at least something else has to be
# there(->{2,}) "s16e03-04-313-314" would make sens any way
(?P<ep_ab_num>\d{1,3}) # absolute number
(-(?P<extra_ab_ep_num>\d{1,3}))* # "-" as separator and anditional absolute number, all optinal
(?<!H.)(?P<ep_ab_num>\d{1,3})(?!0p) # absolute number
(-(?P<extra_ab_ep_num>\d{1,3}))* # "-" as separator and additional absolute number, all optional
(v(?P<version>[0-9]))? # the version e.g. "v2"
.*?
'''

@@ -321,15 +321,15 @@ anime_regexes = [
# Bleach.s16e03-04.313-314
# Bleach s16e03e04 313-314
'''
^(?P<series_name>.+?)[ ._-]+ # start of string and series name and non optinal separator
^(?P<series_name>.+?)[ ._-]+ # start of string and series name and non optional separator
(?P<season_num>\d+)[. _-]* # S01 and optional separator
[xX](?P<ep_num>\d+) # epipisode E02
[xX](?P<ep_num>\d+) # episode E02
(([. _-]*e|-) # linking e/- char
(?P<extra_ep_num>\d+))* # additional E03/etc
([ ._-]{2,}|[ ._]+) # if "-" is used to separate at least something else has to be
# there(->{2,}) "s16e03-04-313-314" would make sens any way
(?P<ep_ab_num>\d{1,3}) # absolute number
(-(?P<extra_ab_ep_num>\d{1,3}))* # "-" as separator and anditional absolute number, all optinal
(?<!H.)(?P<ep_ab_num>\d{1,3})(?!0p) # absolute number
(-(?P<extra_ab_ep_num>\d{1,3}))* # "-" as separator and additional absolute number, all optional
(v(?P<version>[0-9]))? # the version e.g. "v2"
.*?
'''

@@ -338,14 +338,14 @@ anime_regexes = [
('anime_and_normal_reverse',
# Bleach - 313-314 - s16e03-04
'''
^(?P<series_name>.+?)[ ._-]+ # start of string and series name and non optinal separator
(?P<ep_ab_num>\d{1,3}) # absolute number
(-(?P<extra_ab_ep_num>\d{1,3}))* # "-" as separator and anditional absolute number, all optinal
^(?P<series_name>.+?)[ ._-]+ # start of string and series name and non optional separator
(?<!H.)(?P<ep_ab_num>\d{1,3})(?!0p) # absolute number
(-(?P<extra_ab_ep_num>\d{1,3}))* # "-" as separator and additional absolute number, all optional
(v(?P<version>[0-9]))? # the version e.g. "v2"
([ ._-]{2,}|[ ._]+) # if "-" is used to separate at least something else has to be
# there(->{2,}) "s16e03-04-313-314" would make sens any way
[sS](?P<season_num>\d+)[. _-]* # S01 and optional separator
[eE](?P<ep_num>\d+) # epipisode E02
[eE](?P<ep_num>\d+) # episode E02
(([. _-]*e|-) # linking e/- char
(?P<extra_ep_num>\d+))* # additional E03/etc
.*?

@@ -355,8 +355,8 @@ anime_regexes = [
('anime_and_normal_front',
# 165.Naruto Shippuuden.s08e014
'''
^(?P<ep_ab_num>\d{1,3}) # start of string and absolute number
(-(?P<extra_ab_ep_num>\d{1,3}))* # "-" as separator and anditional absolute number, all optinal
^(?<!H.)(?P<ep_ab_num>\d{1,3})(?!0p) # start of string and absolute number
(-(?P<extra_ab_ep_num>\d{1,3}))* # "-" as separator and additional absolute number, all optional
(v(?P<version>[0-9]))?[ ._-]+ # the version e.g. "v2"
(?P<series_name>.+?)[ ._-]+
[sS](?P<season_num>\d+)[. _-]* # S01 and optional separator

@@ -371,7 +371,7 @@ anime_regexes = [
'''
^(?:\[(?P<release_group>.+?)\][ ._-]*)
(?P<series_name>.+?)[ ._-]+
(?P<ep_ab_num>\d{1,3})
(?<!H.)(?P<ep_ab_num>\d{1,3})(?!0p)
(-(?P<extra_ab_ep_num>\d{1,3}))*[ ._-]*?
(?:v(?P<version>[0-9])[ ._-]+?)?
(?:.+?[ ._-]+?)?

@@ -381,8 +381,19 @@ anime_regexes = [
'''
),

('anime_bare',
('anime_bare_ep',
# One Piece - 102
# Show Name 123 - 001
'''
^(?:\[(?P<release_group>.+?)\][ ._-]*)?
(?P<series_name>.+?)[ ._-]+[ ._-]{2,} # Show_Name and min 2 char separator
(?<!H.)(?P<ep_ab_num>\d{1,3})(?!0p) # 1/001, while avoiding H.264 and 1080p from being matched
(-(?P<extra_ab_ep_num>\d{1,3}))*[ ._-]* # 2/002
(?:v(?P<version>[0-9]))? # v2
'''
),

('anime_bare',
# [ACX]_Wolf's_Spirit_001.mkv
'''
^(\[(?P<release_group>.+?)\][ ._-]*)?
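
The recurring edit in these patterns wraps the absolute-number capture as `(?<!H.)(?P<ep_ab_num>\d{1,3})(?!0p)`, guarding against the digits of `H.264` and `1080p`, and the new `anime_bare_ep` entry is what parses the 'Show Name 123 - 001 - Ep 1 name' format from the changelog. A trimmed, non-verbose rendering of that entry:

```python
import re

# cut-down 'anime_bare_ep' from above, comments stripped; the lookarounds keep
# the number capture off the digits of 'H.264'/'1080p' in longer release names
pat = re.compile(r'^(?P<series_name>.+?)[ ._-]+[ ._-]{2,}'
                 r'(?<!H.)(?P<ep_ab_num>\d{1,3})(?!0p)')

m = pat.match('Show Name 123 - 001 - Ep 1 name')
print(m.group('series_name'), m.group('ep_ab_num'))  # Show Name 123 001
```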

@@ -226,7 +226,10 @@ def update_network_dict():

try:
for line in url_data.splitlines():
(key, val) = line.decode('utf-8').strip().rsplit(u':', 1)
try:
(key, val) = line.decode('utf-8').strip().rsplit(u':', 1)
except (StandardError, Exception):
continue
if key is None or val is None:
continue
d[key] = val
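
Moving the split inside its own try/except means one malformed line in the fetched timezone data no longer aborts the whole refresh. The same tolerant shape, standalone:

```python
def parse_lines(url_data):
    d = {}
    for line in url_data.splitlines():
        try:
            key, val = line.strip().rsplit(':', 1)
        except ValueError:  # the real code catches (StandardError, Exception)
            continue
        if not key or not val:
            continue
        d[key] = val
    return d

print(parse_lines('ABC (US):1\nbad line\nSky One:7'))
# {'ABC (US)': '1', 'Sky One': '7'}
```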

@@ -27,27 +27,28 @@ import sickbeard

from sickbeard import db, exceptions, helpers, history, logger, search, show_name_helpers
from sickbeard import encodingKludge as ek
from sickbeard.common import DOWNLOADED, SNATCHED_ANY, SNATCHED_PROPER, Quality, ARCHIVED, FAILED
from sickbeard.common import DOWNLOADED, SNATCHED_ANY, SNATCHED_PROPER, Quality, ARCHIVED, FAILED, neededQualities
from sickbeard.exceptions import ex, MultipleShowObjectsException
from sickbeard import failed_history
from sickbeard.history import dateFormat
from sickbeard.sbdatetime import sbdatetime

from name_parser.parser import NameParser, InvalidNameException, InvalidShowException


def search_propers():
def search_propers(proper_list=None):

if not sickbeard.DOWNLOAD_PROPERS:
return

logger.log(u'Beginning search for new propers')
logger.log(('Checking propers from recent search', 'Beginning search for new propers')[None is proper_list])

age_shows, age_anime = sickbeard.BACKLOG_DAYS + 2, 14
aired_since_shows = datetime.datetime.today() - datetime.timedelta(days=age_shows)
aired_since_anime = datetime.datetime.today() - datetime.timedelta(days=age_anime)
recent_shows, recent_anime = _recent_history(aired_since_shows, aired_since_anime)
if recent_shows or recent_anime:
propers = _get_proper_list(aired_since_shows, recent_shows, recent_anime)
propers = _get_proper_list(aired_since_shows, recent_shows, recent_anime, proper_list=proper_list)

if propers:
_download_propers(propers)

@@ -55,52 +56,59 @@ def search_propers():
logger.log(u'No downloads or snatches found for the last %s%s days to use for a propers search' %
(age_shows, ('', ' (%s for anime)' % age_anime)[helpers.has_anime()]))

_set_last_proper_search(datetime.datetime.today().toordinal())

run_at = ''
proper_sch = sickbeard.properFinderScheduler
if None is proper_sch.start_time:
run_in = proper_sch.lastRun + proper_sch.cycleTime - datetime.datetime.now()
run_at = u', next check '
if datetime.timedelta() > run_in:
run_at += u'imminent'
else:
hours, remainder = divmod(run_in.seconds, 3600)
minutes, seconds = divmod(remainder, 60)
run_at += u'in approx. ' + ('%dh, %dm' % (hours, minutes) if 0 < hours else '%dm, %ds' % (minutes, seconds))
if None is proper_list:
_set_last_proper_search(datetime.datetime.now())

logger.log(u'Completed the search for new propers%s' % run_at)
proper_sch = sickbeard.properFinderScheduler
if None is proper_sch.start_time:
run_in = proper_sch.lastRun + proper_sch.cycleTime - datetime.datetime.now()
run_at = u', next check '
if datetime.timedelta() > run_in:
run_at += u'imminent'
else:
hours, remainder = divmod(run_in.seconds, 3600)
minutes, seconds = divmod(remainder, 60)
run_at += u'in approx. ' + ('%dh, %dm' % (hours, minutes) if 0 < hours else
'%dm, %ds' % (minutes, seconds))

logger.log(u'Completed search for new propers%s' % run_at)
else:
logger.log(u'Completed checking propers from recent search')


def get_old_proper_level(showObj, indexer, indexerid, season, episodes, old_status, new_quality,
def get_old_proper_level(show_obj, indexer, indexerid, season, episodes, old_status, new_quality,
extra_no_name, version, is_anime=False):
level = 0
is_internal = False
codec = ''
rel_name = None
if old_status not in SNATCHED_ANY:
level = Quality.get_proper_level(extra_no_name, version, is_anime)
elif showObj:
myDB = db.DBConnection()
np = NameParser(False, showObj=showObj)
elif show_obj:
my_db = db.DBConnection()
np = NameParser(False, showObj=show_obj)
for episode in episodes:
result = myDB.select('SELECT resource FROM history WHERE showid = ? AND season = ? AND episode = ? AND '
'(' + ' OR '.join("action LIKE '%%%02d'" % x for x in SNATCHED_ANY) + ') '
'ORDER BY date DESC LIMIT 1',
[indexerid, season, episode])
result = my_db.select('SELECT resource FROM history WHERE showid = ? AND season = ? AND episode = ? AND '
'(' + ' OR '.join("action LIKE '%%%02d'" % x for x in SNATCHED_ANY) + ') '
'ORDER BY date DESC LIMIT 1',
[indexerid, season, episode])
if not result or not isinstance(result[0]['resource'], basestring) or not result[0]['resource']:
continue
nq = Quality.sceneQuality(result[0]['resource'], showObj.is_anime)
nq = Quality.sceneQuality(result[0]['resource'], show_obj.is_anime)
if nq != new_quality:
continue
try:
p = np.parse(result[0]['resource'])
except (StandardError, Exception):
continue
level = Quality.get_proper_level(p.extra_info_no_name(), p.version, showObj.is_anime)
level = Quality.get_proper_level(p.extra_info_no_name(), p.version, show_obj.is_anime)
extra_no_name = p.extra_info_no_name()
rel_name = result[0]['resource']
is_internal = p.extra_info_no_name() and re.search(r'\binternal\b', p.extra_info_no_name(), flags=re.I)
codec = _get_codec(p.extra_info_no_name())
break
return level, is_internal, codec
return level, is_internal, codec, extra_no_name, rel_name


def _get_codec(extra_info_no_name):

@@ -110,12 +118,66 @@ def _get_codec(extra_info_no_name):
return '264'
elif re.search(r'\bxvid\b', extra_info_no_name, flags=re.I):
return 'xvid'
elif re.search(r'\b[xh]265|hevc\b', extra_info_no_name, flags=re.I):
elif re.search(r'\b[xh]\W?265|hevc\b', extra_info_no_name, flags=re.I):
return 'hevc'
return ''


def _get_proper_list(aired_since_shows, recent_shows, recent_anime):
def get_webdl_type(extra_info_no_name, rel_name):
if not sickbeard.WEBDL_TYPES:
load_webdl_types()

for t in sickbeard.WEBDL_TYPES:
try:
if re.search(r'\b%s\b' % t[1], extra_info_no_name, flags=re.I):
return t[0]
except (StandardError, Exception):
continue

return ('webdl', 'webrip')[None is re.search(r'\bweb.?dl\b', rel_name, flags=re.I)]


def load_webdl_types():
new_types = []
default_types = [('Amazon', r'AMZN|AMAZON'), ('Netflix', r'NETFLIX|NF'), ('Hulu', r'HULU')]
url = 'https://raw.githubusercontent.com/SickGear/sickgear.extdata/master/SickGear/webdl_types.txt'
url_data = helpers.getURL(url)

my_db = db.DBConnection()
sql_results = my_db.select('SELECT * FROM webdl_types')
old_types = [(r['dname'], r['regex']) for r in sql_results]

if isinstance(url_data, basestring) and url_data.strip():
try:
for line in url_data.splitlines():
try:
(key, val) = line.decode('utf-8').strip().split(u'::', 1)
except (StandardError, Exception):
continue
if key is None or val is None:
continue
new_types.append((key, val))
except (IOError, OSError):
pass

cl = []
for nt in new_types:
if nt not in old_types:
cl.append(['REPLACE INTO webdl_types (dname, regex) VALUES (?,?)', [nt[0], nt[1]]])

for ot in old_types:
if ot not in new_types:
cl.append(['DELETE FROM webdl_types WHERE dname = ? AND regex = ?', [ot[0], ot[1]]])

if cl:
my_db.mass_action(cl)
else:
new_types = old_types

sickbeard.WEBDL_TYPES = new_types + default_types
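
`load_webdl_types()` merges a remote `dname::regex` list with rows cached in the new `webdl_types` table, and `get_webdl_type()` then labels a release with the first matching source, falling back to plain `webdl`/`webrip`; this is what lets the proper-level check further down insist that, say, an AMZN web-dl is only replaced by another AMZN release. A rough standalone sketch of the classification step:

```python
import re

WEBDL_TYPES = [('Amazon', r'AMZN|AMAZON'), ('Netflix', r'NETFLIX|NF'), ('Hulu', r'HULU')]

def webdl_type(extra_info, rel_name):
    for dname, regex in WEBDL_TYPES:
        if re.search(r'\b%s\b' % regex, extra_info, flags=re.I):
            return dname
    # unmatched: 'webdl' when the name carries web-dl, else 'webrip'
    return ('webdl', 'webrip')[None is re.search(r'\bweb.?dl\b', rel_name, flags=re.I)]

print(webdl_type('AMZN WEB-DL DD5.1 H.264', 'Show.S01E01.AMZN.WEB-DL'))  # Amazon
print(webdl_type('WEBRip x264', 'Show.S01E01.WEBRip.x264'))              # webrip
```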
|
||||
|
||||
|
||||
def _get_proper_list(aired_since_shows, recent_shows, recent_anime, proper_list=None):
    propers = {}

    # for each provider get a list of the

@@ -124,22 +186,28 @@ ...
    for cur_provider in providers:
        if not recent_anime and cur_provider.anime_only:
            continue
        threading.currentThread().name = orig_thread_name + ' :: [' + cur_provider.name + ']'

        logger.log(u'Searching for new PROPER releases')
        if None is not proper_list:
            found_propers = proper_list.get(cur_provider.get_id(), [])
            if not found_propers:
                continue
        else:
            threading.currentThread().name = orig_thread_name + ' :: [' + cur_provider.name + ']'

        try:
            found_propers = cur_provider.find_propers(search_date=aired_since_shows, shows=recent_shows,
                                                      anime=recent_anime)
        except exceptions.AuthException as e:
            logger.log(u'Authentication error: ' + ex(e), logger.ERROR)
            continue
        except Exception as e:
            logger.log(u'Error while searching ' + cur_provider.name + ', skipping: ' + ex(e), logger.ERROR)
            logger.log(traceback.format_exc(), logger.ERROR)
            continue
        finally:
            threading.currentThread().name = orig_thread_name
            logger.log(u'Searching for new PROPER releases')

            try:
                found_propers = cur_provider.find_propers(search_date=aired_since_shows, shows=recent_shows,
                                                          anime=recent_anime)
            except exceptions.AuthException as e:
                logger.log(u'Authentication error: ' + ex(e), logger.ERROR)
                continue
            except Exception as e:
                logger.log(u'Error while searching ' + cur_provider.name + ', skipping: ' + ex(e), logger.ERROR)
                logger.log(traceback.format_exc(), logger.ERROR)
                continue
            finally:
                threading.currentThread().name = orig_thread_name

        # if they haven't been added by a different provider then add the proper to the list
        count = 0
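# Illustration: the calling convention implied by the new proper_list parameter.
# A standalone sketch, not scheduler code from the commit; provider ids and the
# candidate strings are hypothetical (the real values are Proper objects).
proper_candidates = {
    'alpharatio': [],                            # searched during recent search, nothing usable
    'btn': ['Show.S01E01.PROPER.720p.HDTV.x264'],
}

# _get_proper_list(..., proper_list=proper_candidates) then reuses these results
# instead of searching the network again, skipping providers that found nothing:
for prov_id in sorted(proper_candidates):
    found_propers = proper_candidates.get(prov_id, [])
    if not found_propers:
        continue  # mirrors the 'if not found_propers: continue' branch above
    print('reusing %d candidate(s) from %s' % (len(found_propers), prov_id))
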
@@ -162,7 +230,7 @@ ...
                                          parse_result.is_anime,
                                          check_is_repack=True)
            x.is_internal = parse_result.extra_info_no_name() and \
                re.search(r'\binternal\b', parse_result.extra_info_no_name(), flags=re.I)
            x.codec = _get_codec(parse_result.extra_info_no_name())
            propers[name] = x
            count += 1

@@ -255,11 +323,12 @@ ...
            except (StandardError, Exception):
                extra_info = None

            old_proper_level, old_is_internal, old_codec = get_old_proper_level(parse_result.show, cur_proper.indexer,
                                                                                cur_proper.indexerid, cur_proper.season,
                                                                                parse_result.episode_numbers, old_status,
                                                                                cur_proper.quality, extra_info,
                                                                                cur_proper.version, cur_proper.is_anime)
            old_proper_level, old_is_internal, old_codec, old_extra_no_name, old_name = \
                get_old_proper_level(parse_result.show, cur_proper.indexer, cur_proper.indexerid, cur_proper.season,
                                     parse_result.episode_numbers, old_status, cur_proper.quality, extra_info,
                                     cur_proper.version, cur_proper.is_anime)

            old_name = (old_name, sql_results[0]['release_name'])[old_name in ('', None)]
            if cur_proper.proper_level < old_proper_level:
                continue
            elif cur_proper.proper_level == old_proper_level:

@@ -273,11 +342,20 @@ ...
            log_same_grp = 'Skipping proper from release group: [%s], does not match existing release group: [%s] for [%s]'\
                           % (cur_proper.release_group, old_release_group, cur_proper.name)

            is_web = (old_quality in (Quality.HDWEBDL, Quality.FULLHDWEBDL, Quality.UHD4KWEB) or
                      (old_quality == Quality.SDTV and re.search(r'\Wweb.?(dl|rip|.[hx]26[45])\W',
                                                                 str(sql_results[0]['release_name']), re.I)))

            if is_web:
                old_webdl_type = get_webdl_type(old_extra_no_name, old_name)
                new_webdl_type = get_webdl_type(cur_proper.extra_info_no_name(), cur_proper.name)
                if old_webdl_type != new_webdl_type:
                    logger.log('Skipping proper webdl source: [%s], does not match existing webdl source: [%s] for [%s]'
                               % (old_webdl_type, new_webdl_type, cur_proper.name), logger.DEBUG)
                    continue

            # for webdls, prevent propers from different groups
            if sickbeard.PROPERS_WEBDL_ONEGRP and \
                    (old_quality in (Quality.HDWEBDL, Quality.FULLHDWEBDL, Quality.UHD4KWEB) or
                     (old_quality == Quality.SDTV and re.search(r'\Wweb.?(dl|rip|.[hx]26[45])\W', str(sql_results[0]['release_name']), re.I))) and \
                    cur_proper.release_group != old_release_group:
            if sickbeard.PROPERS_WEBDL_ONEGRP and is_web and cur_proper.release_group != old_release_group:
                logger.log(log_same_grp, logger.DEBUG)
                continue

@@ -375,6 +453,46 @@ def _download_propers(proper_list):
                    search.snatch_episode(result, SNATCHED_PROPER)


def get_needed_qualites(needed=None):
    if not isinstance(needed, neededQualities):
        needed = neededQualities()
    if not sickbeard.DOWNLOAD_PROPERS or needed.all_needed:
        return needed

    age_shows, age_anime = sickbeard.BACKLOG_DAYS + 2, 14
    aired_since_shows = datetime.datetime.today() - datetime.timedelta(days=age_shows)
    aired_since_anime = datetime.datetime.today() - datetime.timedelta(days=age_anime)

    my_db = db.DBConnection()
    sql_results = my_db.select(
        'SELECT DISTINCT s.indexer, s.indexer_id, e.season, e.episode FROM history as h' +
        ' INNER JOIN tv_episodes AS e ON (h.showid == e.showid AND h.season == e.season AND h.episode == e.episode)' +
        ' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
        ' WHERE h.date >= %s' % min(aired_since_shows, aired_since_anime).strftime(dateFormat) +
        ' AND (%s)' % ' OR '.join(['h.action LIKE "%%%02d"' % x for x in SNATCHED_ANY + [DOWNLOADED, FAILED]])
    )

    for sql_episode in sql_results:
        if needed.all_needed:
            break
        try:
            show = helpers.find_show_by_id(
                sickbeard.showList, {int(sql_episode['indexer']): int(sql_episode['indexer_id'])})
        except MultipleShowObjectsException:
            continue
        if show:
            needed.check_needed_types(show)
            if needed.all_show_qualities_needed(show) or needed.all_qualities_needed:
                continue
            ep_obj = show.getEpisode(season=sql_episode['season'], episode=sql_episode['episode'])
            if ep_obj:
                ep_status, ep_quality = Quality.splitCompositeStatus(ep_obj.status)
                if ep_status in SNATCHED_ANY + [DOWNLOADED, ARCHIVED]:
                    needed.check_needed_qualities([ep_quality])

    return needed

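# Illustration: the composite-status convention that the LIKE clauses above rely on.
# A standalone sketch; assumes episode statuses are packed as status + 100 * quality
# (as done by Quality.compositeStatus elsewhere in sickbeard), with example values.
DOWNLOADED = 4       # example status code (value assumed for the demo)
HDTV = 8             # example quality flag (value assumed for the demo)

def composite_status(status, quality):
    return status + 100 * quality

action = composite_status(DOWNLOADED, HDTV)  # 804
# 'h.action LIKE "%04"' matches rows like 804 because the last two digits are the status
assert ('%02d' % DOWNLOADED) == str(action)[-2:]
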
def _recent_history(aired_since_shows, aired_since_anime):

    recent_shows, recent_anime = [], []

@@ -418,19 +536,23 @@ def _set_last_proper_search(when):

    if 0 == len(sql_results):
        my_db.action('INSERT INTO info (last_backlog, last_indexer, last_proper_search) VALUES (?,?,?)',
                     [0, 0, str(when)])
                     [0, 0, sbdatetime.totimestamp(when)])
    else:
        my_db.action('UPDATE info SET last_proper_search=%s' % when)
        my_db.action('UPDATE info SET last_proper_search=%s' % sbdatetime.totimestamp(when))


def _get_last_proper_search():
def next_proper_timeleft():
    return sickbeard.properFinderScheduler.timeLeft()


def get_last_proper_search():

    my_db = db.DBConnection()
    sql_results = my_db.select('SELECT * FROM info')

    try:
        last_proper_search = datetime.date.fromordinal(int(sql_results[0]['last_proper_search']))
        last_proper_search = int(sql_results[0]['last_proper_search'])
    except (StandardError, Exception):
        return datetime.date.fromordinal(1)
        return 1

    return last_proper_search

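# Illustration: the timestamp round trip behind the last_proper_search change above
# (an epoch value via sbdatetime.totimestamp now replaces the old date ordinal).
# A standalone sketch; totimestamp() is approximated for naive local datetimes.
import datetime
import time

def totimestamp(dt):
    return int(time.mktime(dt.timetuple()))

when = datetime.datetime(2018, 2, 1, 2, 30)
stored = totimestamp(when)                          # integer written to the info table
restored = datetime.datetime.fromtimestamp(stored)  # what readers of the column get back
assert when == restored
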
@@ -65,6 +65,8 @@ class AlphaRatioProvider(generic.TorrentProvider):
                search_url = self.urls['search'] % (search_string, ('&freetorrent=1', '')[not self.freeleech])

                html = self.get_url(search_url)
                if self.should_skip():
                    return results

                cnt = len(items[mode])
                try:

@@ -38,7 +38,7 @@ class AnizbProvider(generic.NZBProvider):
            for params in search_params[mode]:

                search_url = '%sapi/%s' % (self.url, params and (('?q=%s', '?q=%(q)s')['q' in params] % params) or '')
                data = self.cache.getRSSFeed(search_url)
                data = self.cache.get_rss(search_url)
                time.sleep(1.1)

                cnt = len(results)

@@ -73,6 +73,8 @@ class BeyondHDProvider(generic.TorrentProvider):
                search_url += self.urls['search'] % re.sub('[.\s]+', ' ', search_string)

                data_json = self.get_url(search_url, json=True)
                if self.should_skip():
                    return results

                cnt = len(items[mode])
                if data_json and 'results' in data_json and self._check_auth_from_data(data_json):

@@ -71,6 +71,8 @@ class BitHDTVProvider(generic.TorrentProvider):
                search_url = self.urls['search'] % (search_string, self._categories_string(mode))

                html = self.get_url(search_url, timeout=90)
                if self.should_skip():
                    return results

                cnt = len(items[mode])
                try:

@@ -64,6 +64,8 @@ class BitmetvProvider(generic.TorrentProvider):
                search_url = self.urls['search'] % (self._categories_string(mode, 'cat=%s'), search_string)

                html = self.get_url(search_url)
                if self.should_skip():
                    return results

                cnt = len(items[mode])
                try:

@@ -105,6 +105,8 @@ class BlutopiaProvider(generic.TorrentProvider):
                    self.token, '+'.join(search_string.split()), self._categories_string(mode, ''), '', '', '')

                resp = self.get_url(search_url, json=True)
                if self.should_skip():
                    return results

                cnt = len(items[mode])
                try:

@@ -56,6 +56,7 @@ class BTNProvider(generic.TorrentProvider):
        self.ua = self.session.headers['User-Agent']
        self.reject_m2ts = False
        self.cache = BTNCache(self)
        self.has_limit = True

    def _authorised(self, **kwargs):

@@ -67,6 +68,15 @@ class BTNProvider(generic.TorrentProvider):
            raise AuthException('Must set Api key or Username/Password for %s in config provider options' % self.name)
        return True

    def _check_response(self, data, url, post_data=None, post_json=None):
        if not self.should_skip(log_warning=False):
            if data and 'Call Limit' in data:
                self.tmr_limit_update('1', 'h', '150/hr %s' % data)
                self.log_failure_url(url, post_data, post_json)
            else:
                logger.log(u'Action prematurely ended. %(prov)s server error response = %(desc)s' %
                           {'prov': self.name, 'desc': data}, logger.WARNING)
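# Illustration: the 'Call Limit' branch above in isolation. A toy sketch, not
# provider code; the real method defers to GenericProvider.tmr_limit_update()
# and log_failure_url() introduced later in this diff.
def check_response(data):
    if data and 'Call Limit' in data:
        return ('limit', '1', 'h')   # back off for one hour (the 150 calls/hr cap)
    return ('error', None, None)     # any other server error text is only logged

assert ('limit', '1', 'h') == check_response('Call Limit Exceeded')
assert 'error' == check_response('Internal error')[0]
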
    def _search_provider(self, search_params, age=0, **kwargs):

        self._authorised()

@@ -93,21 +103,19 @@ class BTNProvider(generic.TorrentProvider):
                self.api_key, json.dumps(param_dct), items_per_page, offset))

            try:
                response = None
                response, error_text = None, None
                if api_up and self.api_key:
                    self.session.headers['Content-Type'] = 'application/json-rpc'
                    response = helpers.getURL(
                        self.url_api, post_data=json_rpc(params), session=self.session, json=True)
                if not response:
                    api_up = False
                    results = self.html(mode, search_string, results)
                error_text = response['error']['message']
                logger.log(
                    ('Call Limit' in error_text
                     and u'Action aborted because the %(prov)s 150 calls/hr limit was reached'
                     or u'Action prematurely ended. %(prov)s server error response = %(desc)s') %
                    {'prov': self.name, 'desc': error_text}, logger.WARNING)
                return results
                    response = self.get_url(self.url_api, post_data=json_rpc(params), json=True)
                    # response = {'error': {'message': 'Call Limit Exceeded Test'}}
                error_text = response['error']['message']
                api_up = False
                if 'Propers' == mode:
                    return results
                results = self.html(mode, search_string, results)
                if not results:
                    self._check_response(error_text, self.url_api, post_data=json_rpc(params))
                return results
            except AuthException:
                logger.log('API looks to be down, add un/pw config detail to be used as a fallback', logger.WARNING)
            except (KeyError, Exception):

@@ -115,7 +123,7 @@ ...

        data_json = response and 'result' in response and response['result'] or {}
        if data_json:

            self.tmr_limit_count = 0
            found_torrents = 'torrents' in data_json and data_json['torrents'] or {}

            # We got something, we know the API sends max 1000 results at a time.

@@ -134,15 +142,10 @@ ...
            for page in range(1, pages_needed + 1):

                try:
                    response = helpers.getURL(
                        self.url_api, json=True, session=self.session,
                        post_data=json_rpc(params, results_per_page, page * results_per_page))
                    post_data = json_rpc(params, results_per_page, page * results_per_page)
                    response = self.get_url(self.url_api, json=True, post_data=post_data)
                    error_text = response['error']['message']
                    logger.log(
                        ('Call Limit' in error_text
                         and u'Action prematurely ended because the %(prov)s 150 calls/hr limit was reached'
                         or u'Action prematurely ended. %(prov)s server error response = %(desc)s') %
                        {'prov': self.name, 'desc': error_text}, logger.WARNING)
                    self._check_response(error_text, self.url_api, post_data=post_data)
                    return results
                except (KeyError, Exception):
                    data_json = response and 'result' in response and response['result'] or {}

@@ -150,6 +153,7 @@ ...
                # Note that these are individual requests and might time out individually.
                # This would result in 'gaps' in the results. There is no way to fix this though.
                if 'torrents' in data_json:
                    self.tmr_limit_count = 0
                    found_torrents.update(data_json['torrents'])

        cnt = len(results)

@@ -176,7 +180,8 @@ ...

        if self.username and self.password:
            return super(BTNProvider, self)._authorised(
                post_params={'login': 'Log In!'}, logged_in=(lambda y='': 'casThe' in y[0:4096]))
                post_params={'login': 'Log In!'},
                logged_in=(lambda y='': 'casThe' in y[0:512] and '<title>Index' in y[0:512]))
        raise AuthException('Password or Username for %s is empty in config provider options' % self.name)

    def html(self, mode, search_string, results):

@@ -197,7 +202,10 @@ ...
        search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
        search_url = self.urls['search'] % (search_string, self._categories_string(mode, 'filter_cat[%s]=1'))

        html = helpers.getURL(search_url, session=self.session)
        html = self.get_url(search_url, use_tmr_limit=False)
        if self.should_skip(log_warning=False, use_tmr_limit=False):
            return results

        cnt = len(results)
        try:
            if not html or self._has_no_results(html):

@@ -64,7 +64,7 @@ class BTSceneProvider(generic.TorrentProvider):

        url = self.url
        response = self.get_url(url)
        if not response:
        if self.should_skip():
            return results

        form = re.findall('(?is)(<form[^>]+)', response)

@@ -84,6 +84,8 @@ class BTSceneProvider(generic.TorrentProvider):
                    else url + self.urls['search'] % (urllib.quote_plus(search_string))

                html = self.get_url(search_url)
                if self.should_skip():
                    return results

                cnt = len(items[mode])
                try:

@@ -65,6 +65,8 @@ class DHProvider(generic.TorrentProvider):

                html = self.get_url(self.urls['search'] % (
                    '+'.join(search_string.split()), self._categories_string(mode), ('3', '0')[not self.freeleech]))
                if self.should_skip():
                    return results

                cnt = len(items[mode])
                try:

@@ -62,6 +62,8 @@ class ETTVProvider(generic.TorrentProvider):
                    self._categories_string(mode), ('%2B ', '')['Cache' == mode] + '.'.join(search_string.split()))

                html = self.get_url(search_url)
                if self.should_skip():
                    return results

                cnt = len(items[mode])
                try:

@@ -110,6 +112,9 @@ ...
    def get_data(self, url):
        result = None
        html = self.get_url(url, timeout=90)
        if self.should_skip():
            return result

        try:
            result = re.findall('(?i)"(magnet:[^"]+?)">', html)[0]
        except IndexError:

@@ -83,6 +83,8 @@ class FanoProvider(generic.TorrentProvider):
                search_url = self.urls['search'] % (search_string, self._categories_string(mode))

                html = self.get_url(search_url)
                if self.should_skip():
                    return results

                cnt = len(items[mode])
                try:

@@ -62,6 +62,8 @@ class FLProvider(generic.TorrentProvider):

                html = self.get_url(self.urls['search'] % ('+'.join(search_string.split()),
                                                           self._categories_string(mode, template='cats[]=%s')))
                if self.should_skip():
                    return results

                cnt = len(items[mode])
                try:

@@ -66,6 +66,8 @@ class FunFileProvider(generic.TorrentProvider):
                search_url = self.urls['search'] % (self._categories_string(mode), search_string)

                html = self.get_url(search_url, timeout=self.url_timeout)
                if self.should_skip():
                    return results

                cnt = len(items[mode])
                try:

@@ -27,6 +27,7 @@ import re
import time
import urlparse
import threading
import socket
from urllib import quote_plus
import zlib
from base64 import b16encode, b32decode

@@ -45,13 +46,157 @@ from sickbeard.exceptions import SickBeardException, AuthException, ex
from sickbeard.helpers import maybe_plural, remove_file_failed
from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException
from sickbeard.show_name_helpers import get_show_names_all_possible
from sickbeard.sbdatetime import sbdatetime


class HaltParseException(SickBeardException):
    """Something requires the current processing to abort"""


class GenericProvider:
class ProviderFailTypes:
    http = 1
    connection = 2
    connection_timeout = 3
    timeout = 4
    other = 5
    limit = 6
    nodata = 7

    names = {http: 'http', timeout: 'timeout',
             connection: 'connection', connection_timeout: 'connection_timeout',
             nodata: 'nodata', other: 'other', limit: 'limit'}

    def __init__(self):
        pass


class ProviderFail(object):
    def __init__(self, fail_type=ProviderFailTypes.other, code=None, fail_time=None):
        self.code = code
        self.fail_type = fail_type
        self.fail_time = (datetime.datetime.now(), fail_time)[isinstance(fail_time, datetime.datetime)]


class ProviderFailList(object):
    def __init__(self, provider_name):
        self.provider_name = provider_name
        self._fails = []
        self.lock = threading.Lock()
        self.clear_old()
        self.load_list()
        self.last_save = datetime.datetime.now()
        self.dirty = False

    @property
    def fails(self):
        return self._fails

    @property
    def fails_sorted(self):
        fail_dict = {}
        b_d = {'count': 0}
        for e in self._fails:
            fail_date = e.fail_time.date()
            fail_hour = e.fail_time.time().hour
            date_time = datetime.datetime.combine(fail_date, datetime.time(hour=fail_hour))
            if ProviderFailTypes.names[e.fail_type] not in fail_dict.get(date_time, {}):
                default = {'date': str(fail_date), 'date_time': date_time, 'multirow': False}
                for et in ProviderFailTypes.names.itervalues():
                    default[et] = b_d.copy()
                fail_dict.setdefault(date_time, default)[ProviderFailTypes.names[e.fail_type]]['count'] = 1
            else:
                fail_dict[date_time][ProviderFailTypes.names[e.fail_type]]['count'] += 1
            if ProviderFailTypes.http == e.fail_type:
                if e.code in fail_dict[date_time].get(ProviderFailTypes.names[e.fail_type],
                                                      {'code': {}}).get('code', {}):
                    fail_dict[date_time][ProviderFailTypes.names[e.fail_type]]['code'][e.code] += 1
                else:
                    fail_dict[date_time][ProviderFailTypes.names[e.fail_type]].setdefault('code', {})[e.code] = 1

        row_count = {}
        for (k, v) in fail_dict.iteritems():
            row_count.setdefault(v.get('date'), 0)
            if v.get('date') in row_count:
                row_count[v.get('date')] += 1
        for (k, v) in fail_dict.iteritems():
            if 1 < row_count.get(v.get('date')):
                fail_dict[k]['multirow'] = True

        fail_list = sorted([fail_dict[k] for k in fail_dict.iterkeys()], key=lambda y: y.get('date_time'), reverse=True)

        totals = {}
        for fail_date in set([fail.get('date') for fail in fail_list]):
            daytotals = {}
            for et in ProviderFailTypes.names.itervalues():
                daytotals.update({et: sum([x.get(et).get('count') for x in fail_list if fail_date == x.get('date')])})
            totals.update({fail_date: daytotals})
        for (fail_date, total) in totals.iteritems():
            for i, item in enumerate(fail_list):
                if fail_date == item.get('date'):
                    if item.get('multirow'):
                        fail_list[i:i] = [item.copy()]
                        for et in ProviderFailTypes.names.itervalues():
                            fail_list[i][et] = {'count': total[et]}
                            if et == ProviderFailTypes.names[ProviderFailTypes.http]:
                                fail_list[i][et]['code'] = {}
                    break

        return fail_list

    def add_fail(self, fail):
        if isinstance(fail, ProviderFail):
            with self.lock:
                self.dirty = True
                self._fails.append(fail)
                logger.log('Adding fail.%s for %s' % (ProviderFailTypes.names.get(
                    fail.fail_type, ProviderFailTypes.names[ProviderFailTypes.other]), self.provider_name()),
                    logger.DEBUG)
            self.save_list()

    def save_list(self):
        if self.dirty:
            self.clear_old()
            with self.lock:
                my_db = db.DBConnection('cache.db')
                cl = []
                for f in self._fails:
                    cl.append(['INSERT OR IGNORE INTO provider_fails (prov_name, fail_type, fail_code, fail_time) '
                               'VALUES (?,?,?,?)', [self.provider_name(), f.fail_type, f.code,
                                                    sbdatetime.totimestamp(f.fail_time)]])
                self.dirty = False
                if cl:
                    my_db.mass_action(cl)
        self.last_save = datetime.datetime.now()

    def load_list(self):
        with self.lock:
            try:
                my_db = db.DBConnection('cache.db')
                if my_db.hasTable('provider_fails'):
                    results = my_db.select('SELECT * FROM provider_fails WHERE prov_name = ?', [self.provider_name()])
                    self._fails = []
                    for r in results:
                        try:
                            self._fails.append(ProviderFail(
                                fail_type=helpers.tryInt(r['fail_type']), code=helpers.tryInt(r['fail_code']),
                                fail_time=datetime.datetime.fromtimestamp(helpers.tryInt(r['fail_time']))))
                        except (StandardError, Exception):
                            continue
            except (StandardError, Exception):
                pass

    def clear_old(self):
        with self.lock:
            try:
                my_db = db.DBConnection('cache.db')
                if my_db.hasTable('provider_fails'):
                    time_limit = sbdatetime.totimestamp(datetime.datetime.now() - datetime.timedelta(days=28))
                    my_db.action('DELETE FROM provider_fails WHERE fail_time < ?', [time_limit])
            except (StandardError, Exception):
                pass

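# Illustration: how fail records flow through the classes above. A standalone
# sketch with an in-memory list standing in for ProviderFailList (which also
# persists each record to cache.db and prunes entries older than 28 days).
import datetime

class ProviderFail(object):
    def __init__(self, fail_type='other', code=None, fail_time=None):
        self.fail_type, self.code = fail_type, code
        self.fail_time = fail_time or datetime.datetime.now()

fails = []  # stands in for ProviderFailList._fails
fails.append(ProviderFail('http', code=503))
fails.append(ProviderFail('timeout'))

# the most recent failure type, as the last_fail property computes it further below
last = sorted(fails, key=lambda x: x.fail_time, reverse=True)[0].fail_type
print(last)  # timeout
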
class GenericProvider(object):
    NZB = 'nzb'
    TORRENT = 'torrent'

@@ -86,6 +231,321 @@ class GenericProvider:
            # 'Chrome/32.0.1700.107 Safari/537.36'}
            'User-Agent': USER_AGENT}

        self._failure_count = 0
        self._failure_time = None
        self.fails = ProviderFailList(self.get_id)
        self._tmr_limit_count = 0
        self._tmr_limit_time = None
        self._tmr_limit_wait = None
        self._last_fail_type = None
        self.has_limit = False
        self.fail_times = {1: (0, 15), 2: (0, 30), 3: (1, 0), 4: (2, 0), 5: (3, 0), 6: (6, 0), 7: (12, 0), 8: (24, 0)}
        self._load_fail_values()

    def _load_fail_values(self):
        if hasattr(sickbeard, 'DATA_DIR'):
            my_db = db.DBConnection('cache.db')
            if my_db.hasTable('provider_fails_count'):
                r = my_db.select('SELECT * FROM provider_fails_count WHERE prov_name = ?', [self.get_id()])
                if r:
                    self._failure_count = helpers.tryInt(r[0]['failure_count'], 0)
                    if r[0]['failure_time']:
                        self._failure_time = datetime.datetime.fromtimestamp(r[0]['failure_time'])
                    else:
                        self._failure_time = None
                    self._tmr_limit_count = helpers.tryInt(r[0]['tmr_limit_count'], 0)
                    if r[0]['tmr_limit_time']:
                        self._tmr_limit_time = datetime.datetime.fromtimestamp(r[0]['tmr_limit_time'])
                    else:
                        self._tmr_limit_time = None
                    if r[0]['tmr_limit_wait']:
                        self._tmr_limit_wait = datetime.timedelta(seconds=helpers.tryInt(r[0]['tmr_limit_wait'], 0))
                    else:
                        self._tmr_limit_wait = None
            self._last_fail_type = self.last_fail

    def _save_fail_value(self, field, value):
        my_db = db.DBConnection('cache.db')
        if my_db.hasTable('provider_fails_count'):
            r = my_db.action('UPDATE provider_fails_count SET %s = ? WHERE prov_name = ?' % field,
                             [value, self.get_id()])
            if 0 == r.rowcount:
                my_db.action('REPLACE INTO provider_fails_count (prov_name, %s) VALUES (?,?)' % field,
                             [self.get_id(), value])

    @property
    def last_fail(self):
        try:
            return sorted(self.fails.fails, key=lambda x: x.fail_time, reverse=True)[0].fail_type
        except (StandardError, Exception):
            return None

    @property
    def failure_count(self):
        return self._failure_count

    @failure_count.setter
    def failure_count(self, value):
        changed_val = self._failure_count != value
        self._failure_count = value
        if changed_val:
            self._save_fail_value('failure_count', value)

    @property
    def failure_time(self):
        return self._failure_time

    @failure_time.setter
    def failure_time(self, value):
        if None is value or isinstance(value, datetime.datetime):
            changed_val = self._failure_time != value
            self._failure_time = value
            if changed_val:
                self._save_fail_value('failure_time', (sbdatetime.totimestamp(value), value)[None is value])

    @property
    def tmr_limit_count(self):
        return self._tmr_limit_count

    @tmr_limit_count.setter
    def tmr_limit_count(self, value):
        changed_val = self._tmr_limit_count != value
        self._tmr_limit_count = value
        if changed_val:
            self._save_fail_value('tmr_limit_count', value)

    @property
    def tmr_limit_time(self):
        return self._tmr_limit_time

    @tmr_limit_time.setter
    def tmr_limit_time(self, value):
        if None is value or isinstance(value, datetime.datetime):
            changed_val = self._tmr_limit_time != value
            self._tmr_limit_time = value
            if changed_val:
                self._save_fail_value('tmr_limit_time', (sbdatetime.totimestamp(value), value)[None is value])

    @property
    def max_index(self):
        return len(self.fail_times)

    @property
    def tmr_limit_wait(self):
        return self._tmr_limit_wait

    @tmr_limit_wait.setter
    def tmr_limit_wait(self, value):
        if isinstance(getattr(self, 'fails', None), ProviderFailList) and isinstance(value, datetime.timedelta):
            self.fails.add_fail(ProviderFail(fail_type=ProviderFailTypes.limit))
        changed_val = self._tmr_limit_wait != value
        self._tmr_limit_wait = value
        if changed_val:
            if None is value:
                self._save_fail_value('tmr_limit_wait', value)
            elif isinstance(value, datetime.timedelta):
                self._save_fail_value('tmr_limit_wait', value.total_seconds())

    def fail_time_index(self, base_limit=2):
        i = self.failure_count - base_limit
        return (i, self.max_index)[i >= self.max_index]

    def tmr_limit_update(self, period, unit, desc):
        self.tmr_limit_time = datetime.datetime.now()
        self.tmr_limit_count += 1
        limit_set = False
        if None not in (period, unit):
            limit_set = True
            if unit in ('s', 'sec', 'secs', 'seconds', 'second'):
                self.tmr_limit_wait = datetime.timedelta(seconds=helpers.tryInt(period))
            elif unit in ('m', 'min', 'mins', 'minutes', 'minute'):
                self.tmr_limit_wait = datetime.timedelta(minutes=helpers.tryInt(period))
            elif unit in ('h', 'hr', 'hrs', 'hours', 'hour'):
                self.tmr_limit_wait = datetime.timedelta(hours=helpers.tryInt(period))
            elif unit in ('d', 'days', 'day'):
                self.tmr_limit_wait = datetime.timedelta(days=helpers.tryInt(period))
            else:
                limit_set = False
        if not limit_set:
            time_index = self.fail_time_index(base_limit=0)
            self.tmr_limit_wait = self.wait_time(time_index)
        logger.log('Request limit reached. Waiting for %s until next retry. Message: %s' %
                   (self.tmr_limit_wait, desc or 'none found'), logger.WARNING)

    def wait_time(self, time_index=None):
        """
        Return a suitable wait time, selected by parameter, or based on the current failure count

        :param time_index: A key value index into the fail_times dict, or selects using failure count if None
        :type time_index: Integer
        :return: Time
        :rtype: Timedelta
        """
        if None is time_index:
            time_index = self.fail_time_index()
        return datetime.timedelta(hours=self.fail_times[time_index][0], minutes=self.fail_times[time_index][1])
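# Illustration: the escalating backoff the fail_times dict above encodes:
# 15m, 30m, 1h, 2h, 3h, 6h, 12h, capped at 24h. A standalone sketch; the index
# is clamped here for the demo since the real code only consults it from the
# third failure onward (base_limit gives two "free" retries).
import datetime

FAIL_TIMES = {1: (0, 15), 2: (0, 30), 3: (1, 0), 4: (2, 0), 5: (3, 0), 6: (6, 0), 7: (12, 0), 8: (24, 0)}

def wait_time(failure_count, base_limit=2):
    i = max(1, min(failure_count - base_limit, len(FAIL_TIMES)))
    hours, minutes = FAIL_TIMES[i]
    return datetime.timedelta(hours=hours, minutes=minutes)

for count in (3, 5, 12):
    print(count, wait_time(count))  # 3 -> 0:15:00, 5 -> 1:00:00, 12 -> 1 day, 0:00:00
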
    def fail_newest_delta(self):
        """
        Return how long since most recent failure
        :return: Period since most recent failure on record
        :rtype: timedelta
        """
        return datetime.datetime.now() - self.failure_time

    def is_waiting(self):
        return self.fail_newest_delta() < self.wait_time()

    def valid_tmr_time(self):
        return isinstance(self.tmr_limit_wait, datetime.timedelta) and \
               isinstance(self.tmr_limit_time, datetime.datetime)

    @property
    def get_next_try_time(self):
        n = None
        h = datetime.timedelta(seconds=0)
        f = datetime.timedelta(seconds=0)
        if self.valid_tmr_time():
            h = self.tmr_limit_time + self.tmr_limit_wait - datetime.datetime.now()
        if 3 <= self.failure_count and isinstance(self.failure_time, datetime.datetime) and self.is_waiting():
            f = self.failure_time + self.wait_time() - datetime.datetime.now()
        if datetime.timedelta(seconds=0) < max((h, f)):
            n = max((h, f))
        return n

    def retry_next(self):
        if self.valid_tmr_time():
            self.tmr_limit_time = datetime.datetime.now() - self.tmr_limit_wait
        if 3 <= self.failure_count and isinstance(self.failure_time, datetime.datetime) and self.is_waiting():
            self.failure_time = datetime.datetime.now() - self.wait_time()

    @staticmethod
    def fmt_delta(delta):
        return str(delta).rsplit('.')[0]

    def should_skip(self, log_warning=True, use_tmr_limit=True):
        """
        Determine if a subsequent server request should be skipped. The result of this logic is based on most recent
        server connection activity, including exhausted request limits and counted connect failures, which determine a
        "cool down" period before reconnection attempts are recommended by returning False.
        :param log_warning: Output to log if True (default) otherwise set False for no output.
        :type log_warning: Boolean
        :param use_tmr_limit: Setting this to False will ignore a tmr limit being reached and will instead return False.
        :type use_tmr_limit: Boolean
        :return: True for any known issue that would prevent a subsequent server connection, otherwise False.
        :rtype: Boolean
        """
        if self.valid_tmr_time():
            time_left = self.tmr_limit_time + self.tmr_limit_wait - datetime.datetime.now()
            if time_left > datetime.timedelta(seconds=0):
                if log_warning:
                    # Ensure provider name output (e.g. when displaying config/provs) instead of e.g. thread "Tornado"
                    prepend = ('[%s] :: ' % self.name, '')[any([x.name in threading.currentThread().getName()
                                                                for x in sickbeard.providers.sortedProviderList()])]
                    logger.log('%sToo many requests reached at %s, waiting for %s' % (
                        prepend, self.fmt_delta(self.tmr_limit_time), self.fmt_delta(time_left)), logger.WARNING)
                return use_tmr_limit
            else:
                self.tmr_limit_time = None
                self.tmr_limit_wait = None
        if 3 <= self.failure_count:
            if None is self.failure_time:
                self.failure_time = datetime.datetime.now()
            if self.is_waiting():
                if log_warning:
                    time_left = self.wait_time() - self.fail_newest_delta()
                    logger.log('Failed %s times, skipping provider for %s, last failure at %s with fail type: %s' % (
                        self.failure_count, self.fmt_delta(time_left), self.fmt_delta(self.failure_time),
                        ProviderFailTypes.names.get(
                            self.last_fail, ProviderFailTypes.names[ProviderFailTypes.other])), logger.WARNING)
                return True
        return False

    def inc_failure_count(self, *args, **kwargs):
        fail_type = ('fail_type' in kwargs and kwargs['fail_type'].fail_type) or \
                    (isinstance(args, tuple) and isinstance(args[0], ProviderFail) and args[0].fail_type)
        if not isinstance(self.failure_time, datetime.datetime) or \
                fail_type != self._last_fail_type or \
                self.fail_newest_delta() > datetime.timedelta(seconds=3):
            self.failure_count += 1
            self.failure_time = datetime.datetime.now()
            self._last_fail_type = fail_type
            self.fails.add_fail(*args, **kwargs)
        else:
            logger.log('%s: Not logging same failure within 3 seconds' % self.name, logger.DEBUG)

    def get_url(self, url, skip_auth=False, use_tmr_limit=True, *args, **kwargs):
        """
        Return data from a URI with a possible check for authentication prior to the data fetch.
        Raised errors and no data in responses are tracked for making future logic decisions.

        :param url: Address where to fetch data from
        :type url: String
        :param skip_auth: Skip authentication check of provider if True
        :type skip_auth: Boolean
        :param use_tmr_limit: An API limit can be positive before a fetch but unwanted; set False to short-circuit should_skip
        :type use_tmr_limit: Boolean
        :param args: params to pass through to getURL
        :type args:
        :param kwargs: keyword params to pass through to getURL
        :type kwargs:
        :return: None or data fetched from URL
        :rtype: String or Nonetype
        """
        data = None

        # check for auth
        if (not skip_auth and not (self.is_public_access()
                                   and type(self).__name__ not in ['TorrentRssProvider']) and not self._authorised()) \
                or self.should_skip(use_tmr_limit=use_tmr_limit):
            return

        kwargs['raise_exceptions'] = True
        kwargs['raise_status_code'] = True
        for k, v in dict(headers=self.headers, hooks=dict(response=self.cb_response), session=self.session).items():
            kwargs.setdefault(k, v)

        post_data = kwargs.get('post_data')
        post_json = kwargs.get('post_json')

        # noinspection PyUnusedLocal
        log_failure_url = False
        try:
            data = helpers.getURL(url, *args, **kwargs)
            if data:
                if 0 != self.failure_count:
                    logger.log('Unblocking provider: %s' % self.get_id(), logger.DEBUG)
                self.failure_count = 0
                self.failure_time = None
            else:
                self.inc_failure_count(ProviderFail(fail_type=ProviderFailTypes.nodata))
                log_failure_url = True
        except requests.exceptions.HTTPError as e:
            self.inc_failure_count(ProviderFail(fail_type=ProviderFailTypes.http, code=e.response.status_code))
        except requests.exceptions.ConnectionError:
            self.inc_failure_count(ProviderFail(fail_type=ProviderFailTypes.connection))
        except requests.exceptions.ReadTimeout:
            self.inc_failure_count(ProviderFail(fail_type=ProviderFailTypes.timeout))
        except (requests.exceptions.Timeout, socket.timeout):
            self.inc_failure_count(ProviderFail(fail_type=ProviderFailTypes.connection_timeout))
        except (StandardError, Exception) as e:
            log_failure_url = True
            self.inc_failure_count(ProviderFail(fail_type=ProviderFailTypes.other))

        self.fails.save_list()
        if log_failure_url:
            self.log_failure_url(url, post_data, post_json)
        return data
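# Illustration: the fetch-then-check pattern that every provider hunk in this
# commit adopts around the new get_url()/should_skip() pair. A runnable toy;
# _DemoProvider and its attributes are stand-ins, not a real provider class.
class _DemoProvider(object):
    def __init__(self):
        self.urls = {'search': 'https://example.invalid/search?q=%s'}
        self._skip = False

    def get_url(self, url):
        # the real get_url() counts failures and rate limits; here: pretend the fetch failed
        self._skip = True
        return None

    def should_skip(self, log_warning=True):
        return self._skip

    def search(self, search_strings):
        results = []
        for search_string in search_strings:
            html = self.get_url(self.urls['search'] % search_string)
            if self.should_skip():   # provider is cooling down; abandon the whole search
                return results
            # ... parse html into results here ...
        return results

print(_DemoProvider().search(['show s01e01']))  # [] - skipped after the first failure
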
    def log_failure_url(self, url, post_data=None, post_json=None):
        if self.should_skip(log_warning=False):
            post = []
            if post_data:
                post += [' .. Post params: [%s]' % '&'.join([post_data])]
            if post_json:
                post += [' .. Json params: [%s]' % '&'.join([post_json])]
            logger.log('Failure URL: %s%s' % (url, ''.join(post)), logger.WARNING)

    def get_id(self):
        return GenericProvider.make_id(self.name)

@@ -152,19 +612,6 @@ class GenericProvider:
        self.session.response = dict(url=r.url, status_code=r.status_code, elapsed=r.elapsed, from_cache=r.from_cache)
        return r

    def get_url(self, url, post_data=None, params=None, timeout=30, json=False):
        """
        By default this is just a simple urlopen call but this method should be overridden
        for providers with special URL requirements (like cookies)
        """

        # check for auth
        if not self._authorised():
            return

        return helpers.getURL(url, post_data=post_data, params=params, headers=self.headers, timeout=timeout,
                              session=self.session, json=json, hooks=dict(response=self.cb_response))

    def download_result(self, result):
        """
        Save the result to disk.

@@ -428,9 +875,13 @@ class GenericProvider:

        results = {}
        item_list = []
        if self.should_skip():
            return results

        searched_scene_season = None
        for ep_obj in episodes:
            if self.should_skip(log_warning=False):
                break
            # search cache for episode result
            cache_result = self.cache.searchCache(ep_obj, manual_search)
            if cache_result:

@@ -457,6 +908,8 @@ class GenericProvider:

        for cur_param in search_params:
            item_list += self._search_provider(cur_param, search_mode=search_mode, epcount=len(episodes))
            if self.should_skip():
                break

        return self.finish_find_search_results(show, episodes, search_mode, manual_search, results, item_list)

@@ -649,10 +1102,11 @@ class GenericProvider:
        :param count: count of successfully processed items
        :param url: source url of item(s)
        """
        str1, thing, str3 = (('', '%s item' % mode.lower(), ''), (' usable', 'proper', ' found'))['Propers' == mode]
        logger.log(u'%s %s in response from %s' % (('No' + str1, count)[0 < count], (
            '%s%s%s%s' % (('', 'freeleech ')[getattr(self, 'freeleech', False)], thing, maybe_plural(count), str3)),
            re.sub('(\s)\s+', r'\1', url)))
        if not self.should_skip():
            str1, thing, str3 = (('', '%s item' % mode.lower(), ''), (' usable', 'proper', ' found'))['Propers' == mode]
            logger.log(u'%s %s in response from %s' % (('No' + str1, count)[0 < count], (
                '%s%s%s%s' % (('', 'freeleech ')[getattr(self, 'freeleech', False)], thing, maybe_plural(count), str3)),
                re.sub('(\s)\s+', r'\1', url)))

    def check_auth_cookie(self):

@@ -723,12 +1177,13 @@ class GenericProvider:
        return


class NZBProvider(object, GenericProvider):
class NZBProvider(GenericProvider):

    def __init__(self, name, supports_backlog=True, anime_only=False):
        GenericProvider.__init__(self, name, supports_backlog, anime_only)

        self.providerType = GenericProvider.NZB
        self.has_limit = True

    def image_name(self):

@@ -757,6 +1212,9 @@ class NZBProvider(object, GenericProvider):
        results = [classes.Proper(x['name'], x['url'], datetime.datetime.fromtimestamp(x['time']), self.show) for x in
                   cache_results]

        if self.should_skip():
            return results

        index = 0
        alt_search = ('nzbs_org' == self.get_id())
        do_search_alt = False

@@ -775,6 +1233,9 @@ class NZBProvider(object, GenericProvider):

        urls = []
        while index < len(search_terms):
            if self.should_skip(log_warning=False):
                break

            search_params = {'q': search_terms[index], 'maxage': sickbeard.BACKLOG_DAYS + 2}
            if alt_search:

@@ -817,7 +1278,7 @@ class NZBProvider(object, GenericProvider):
        return self._search_provider(search_params=search_params, **kwargs)


class TorrentProvider(object, GenericProvider):
class TorrentProvider(GenericProvider):

    def __init__(self, name, supports_backlog=True, anime_only=False, cache_update_freq=None, update_freq=None):
        GenericProvider.__init__(self, name, supports_backlog, anime_only)

@@ -995,8 +1456,9 @@ class TorrentProvider(object, GenericProvider):
            return None

        if 10 < len(cur_url) and ((expire and (expire > int(time.time()))) or
                                  self._has_signature(helpers.getURL(cur_url, session=self.session))):
                                  self._has_signature(self.get_url(cur_url, skip_auth=True))):
            if self.should_skip():
                return None
            for k, v in getattr(self, 'url_tmpl', {}).items():
                self.urls[k] = v % {'home': cur_url, 'vars': getattr(self, 'url_vars', {}).get(k, '')}

@@ -1056,15 +1518,17 @@ class TorrentProvider(object, GenericProvider):

        if isinstance(url, type([])):
            for i in range(0, len(url)):
                helpers.getURL(url.pop(), session=self.session)
                self.get_url(url.pop(), skip_auth=True)
                if self.should_skip():
                    return False

        passfield, userfield = None, None
        if not url:
            if hasattr(self, 'urls'):
                url = self.urls.get('login_action')
                if url:
                    response = helpers.getURL(url, session=self.session)
                    if None is response:
                    response = self.get_url(url, skip_auth=True)
                    if self.should_skip() or None is response:
                        return False
                    try:
                        post_params = isinstance(post_params, type({})) and post_params or {}

@@ -1104,8 +1568,8 @@ class TorrentProvider(object, GenericProvider):
        if self.password not in post_params.values():
            post_params[(passfield, 'password')[not passfield]] = self.password

        response = helpers.getURL(url, post_data=post_params, session=self.session, timeout=timeout)
        if response:
        response = self.get_url(url, skip_auth=True, post_data=post_params, timeout=timeout)
        if not self.should_skip() and response:
            if logged_in(response):
                return True

@@ -1153,6 +1617,8 @@ class TorrentProvider(object, GenericProvider):
        :return: list of Proper objects
        """
        results = []
        if self.should_skip():
            return results

        search_terms = getattr(self, 'proper_search_terms', ['proper', 'repack', 'real'])
        if not isinstance(search_terms, list):

@@ -1164,9 +1630,14 @@ class TorrentProvider(object, GenericProvider):

        clean_term = re.compile(r'(?i)[^a-z1-9|.]+')
        for proper_term in search_terms:
            if self.should_skip(log_warning=False):
                break

            proper_check = re.compile(r'(?i)(?:%s)' % clean_term.sub('', proper_term))
            for item in items:
                if self.should_skip(log_warning=False):
                    break

                title, url = self._title_and_url(item)
                if proper_check.search(title):
                    results.append(classes.Proper(title, url, datetime.datetime.today(),

@@ -66,6 +66,8 @@ class GFTrackerProvider(generic.TorrentProvider):
                    (self.urls['search'] % search_string, '')['Cache' == mode])

                html = self.get_url(search_url)
                if self.should_skip():
                    return results

                cnt = len(items[mode])
                try:

@@ -70,6 +70,8 @@ class GrabTheInfoProvider(generic.TorrentProvider):
                    (self.urls['search'] % search_string, '')['Cache' == mode])

                html = self.get_url(search_url)
                if self.should_skip():
                    return results

                cnt = len(items[mode])
                try:

@@ -105,6 +105,8 @@ class HD4FreeProvider(generic.TorrentProvider):
                    self.token, '+'.join(search_string.split()), self._categories_string(mode, ''), '', '', '')

                resp = self.get_url(search_url, json=True)
                if self.should_skip():
                    return results

                cnt = len(items[mode])
                try:

@@ -48,7 +48,7 @@ class HDBitsProvider(generic.TorrentProvider):

        self.username, self.passkey, self.freeleech, self.minseed, self.minleech = 5 * [None]

    def check_auth_from_data(self, parsed_json):
    def _check_auth_from_data(self, parsed_json):

        if 'status' in parsed_json and 5 == parsed_json.get('status') and 'message' in parsed_json:
            logger.log(u'Incorrect username or password for %s: %s' % (self.name, parsed_json['message']), logger.DEBUG)

@@ -112,9 +112,11 @@ ...
        search_url = self.urls['search']

        json_resp = self.get_url(search_url, post_data=post_data, json=True)
        if self.should_skip():
            return results

        try:
            if not (json_resp and self.check_auth_from_data(json_resp) and 'data' in json_resp):
            if not (json_resp and self._check_auth_from_data(json_resp) and 'data' in json_resp):
                logger.log(u'Response from %s does not contain any json data, abort' % self.name, logger.ERROR)
                return results
        except AuthException as e:

@@ -83,6 +83,8 @@ class HDSpaceProvider(generic.TorrentProvider):
                search_url += self.urls['search'] % rc['nodots'].sub(' ', search_string)

                html = self.get_url(search_url)
                if self.should_skip():
                    return results

                cnt = len(items[mode])
                try:

@@ -86,6 +86,8 @@ class HDTorrentsProvider(generic.TorrentProvider):
                    self._categories_string(mode, template='category[]=%s')
                    .replace('&category[]=Animation', ('&genre[]=Animation', '')[mode in ['Cache', 'Propers']]))
                html = self.get_url(search_url)
                if self.should_skip():
                    return results

                cnt = len(items[mode])
                try:

@@ -88,6 +88,8 @@ class IPTorrentsProvider(generic.TorrentProvider):
                    (';free', '')[not self.freeleech], (';o=seeders', '')['Cache' == mode])

                html = self.get_url(search_url)
                if self.should_skip():
                    return results

                cnt = len(items[mode])
                try:

@@ -67,6 +67,8 @@ class LimeTorrentsProvider(generic.TorrentProvider):
                    else self.urls['search'] % (urllib.quote_plus(search_string))

                html = self.get_url(search_url)
                if self.should_skip():
                    return results

                cnt = len(items[mode])
                try:

@@ -54,6 +54,8 @@ class MagnetDLProvider(generic.TorrentProvider):
                search_url = self.urls['search'] % re.sub('[.\s]+', ' ', search_string)

                html = self.get_url(search_url)
                if self.should_skip():
                    return results

                cnt = len(items[mode])
                try:

@@ -65,6 +65,9 @@ class MoreThanProvider(generic.TorrentProvider):

                # fetches 15 results by default, and up to 100 if allowed in user profile
                html = self.get_url(search_url)
                if self.should_skip():
                    return results

                cnt = len(items[mode])
                try:
                    if not html or self._has_no_results(html):

@@ -68,6 +68,8 @@ class NcoreProvider(generic.TorrentProvider):

                # fetches 15 results by default, and up to 100 if allowed in user profile
                html = self.get_url(search_url)
                if self.should_skip():
                    return results

                cnt = len(items[mode])
                try:

@@ -51,7 +51,9 @@ class NebulanceProvider(generic.TorrentProvider):
                                                      post_params={'keeplogged': '1', 'form_tmpl': True}):
            return False
        if not self.user_authkey:
            response = helpers.getURL(self.urls['user'], session=self.session, json=True)
            response = self.get_url(self.urls['user'], skip_auth=True, json=True)
            if self.should_skip():
                return False
            if 'response' in response:
                self.user_authkey, self.user_passkey = [response['response'].get(v) for v in 'authkey', 'passkey']
        return self.user_authkey

@@ -74,6 +76,8 @@ class NebulanceProvider(generic.TorrentProvider):
                search_url += self.urls['search'] % rc['nodots'].sub('+', search_string)

                data_json = self.get_url(search_url, json=True)
                if self.should_skip():
                    return results

                cnt = len(items[mode])
                try:

@@ -28,7 +28,7 @@ from math import ceil
from sickbeard.sbdatetime import sbdatetime
from . import generic
from sickbeard import helpers, logger, tvcache, classes, db
from sickbeard.common import neededQualities, Quality
from sickbeard.common import neededQualities, Quality, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, DOWNLOADED
from sickbeard.exceptions import AuthException, MultipleShowObjectsException
from sickbeard.indexers.indexer_config import *
from io import BytesIO

@@ -291,7 +291,12 @@ class NewznabProvider(generic.NZBProvider):
            return [x for x in cats if x['id'] not in self.excludes]
        return ','.join(set(cats.split(',')) - self.excludes)

    def check_auth_from_data(self, data):
    def _check_auth(self, is_required=None):
        if self.should_skip():
            return False
        return super(NewznabProvider, self)._check_auth(is_required)

    def _check_auth_from_data(self, data, url):

        if data is None or not hasattr(data, 'tag'):
            return False

@@ -306,6 +311,13 @@ ...
            raise AuthException('Your account on %s has been suspended, contact the admin.' % self.name)
        elif '102' == code:
            raise AuthException('Your account isn\'t allowed to use the API on %s, contact the admin.' % self.name)
        elif '500' == code:
            try:
                retry_time, unit = re.findall(r'Retry in (\d+)\W+([a-z]+)', description, flags=re.I)[0]
            except IndexError:
                retry_time, unit = None, None
            self.tmr_limit_update(retry_time, unit, description)
            self.log_failure_url(url)
        elif '910' == code:
            logger.log(
                '%s %s, please check with provider.' %
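# Illustration: parsing a retry hint out of an error 500 description, as the
# regex above does before handing the period and unit to tmr_limit_update().
# A standalone sketch; the description strings are hypothetical examples.
import re

def parse_retry(description):
    try:
        retry_time, unit = re.findall(r'Retry in (\d+)\W+([a-z]+)', description, flags=re.I)[0]
    except IndexError:
        retry_time, unit = None, None  # tmr_limit_update() then falls back to its own schedule
    return retry_time, unit

print(parse_retry('Request limit reached. Retry in 2 hours'))  # ('2', 'hours')
print(parse_retry('Service temporarily unavailable'))          # (None, None)
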
@@ -316,6 +328,7 @@ ...
                logger.WARNING)
            return False

        self.tmr_limit_count = 0
        return True

    def config_str(self):

@@ -530,15 +543,20 @@ ...
            (hits_per_page * 100 // hits_per_page * 2, hits_per_page * int(ceil(rel_limit * 1.5)))[season_search])

    def find_search_results(self, show, episodes, search_mode, manual_search=False, try_other_searches=False, **kwargs):
        self._check_auth()
        check = self._check_auth()
        results = {}
        if (isinstance(check, bool) and not check) or self.should_skip():
            return results

        self.show = show

        results = {}
        item_list = []
        name_space = {}

        searched_scene_season = s_mode = None
        for ep_obj in episodes:
            if self.should_skip(log_warning=False):
                break
            # skip if season already searched
            if (s_mode or 'sponly' == search_mode) and 1 < len(episodes) \
                    and searched_scene_season == ep_obj.scene_season:

@@ -577,6 +595,8 @@ ...
                try_all_searches=try_other_searches)
            item_list += items
            name_space.update(n_space)
            if self.should_skip():
                break

        return self.finish_find_search_results(
            show, episodes, search_mode, manual_search, results, item_list, name_space=name_space)

@@ -617,7 +637,13 @@ ...
    def _search_provider(self, search_params, needed=neededQualities(need_all=True), max_items=400,
                         try_all_searches=False, **kwargs):

        results, n_spaces = [], {}
        if self.should_skip():
            return results, n_spaces

        api_key = self._check_auth()
        if isinstance(api_key, bool) and not api_key:
            return results, n_spaces

        base_params = {'t': 'tvsearch',
                       'maxage': sickbeard.USENET_RETENTION or 0,

@@ -644,8 +670,13 @@ ...
        cat_webdl = self.cats.get(NewznabConstants.CAT_WEBDL)

        for mode in search_params.keys():
            if self.should_skip(log_warning=False):
                break
            for i, params in enumerate(search_params[mode]):

                if self.should_skip(log_warning=False):
                    break

                # category ids
                cat = []
                if 'Episode' == mode or 'Season' == mode:

@@ -697,14 +728,13 @@ ...
                search_url = '%sapi?%s' % (self.url, urllib.urlencode(request_params))
                i and time.sleep(2.1)

                data = helpers.getURL(search_url)
                data = self.get_url(search_url)

                if not data:
                    logger.log('No Data returned from %s' % self.name, logger.WARNING)
                if self.should_skip() or not data:
                    break

                # hack this in until it's fixed server side
                if data and not data.startswith('<?xml'):
                if not data.startswith('<?xml'):
                    data = '<?xml version="1.0" encoding="ISO-8859-1" ?>%s' % data

                try:

@@ -714,7 +744,7 @@ ...
                    logger.log('Error trying to load %s RSS feed' % self.name, logger.WARNING)
                    break

                if not self.check_auth_from_data(parsed_xml):
                if not self._check_auth_from_data(parsed_xml, search_url):
                    break

                if 'rss' != parsed_xml.tag:

@@ -794,6 +824,10 @@ ...
        results = [classes.Proper(x['name'], x['url'], datetime.datetime.fromtimestamp(x['time']), self.show) for x in
                   cache_results]

        check = self._check_auth()
        if isinstance(check, bool) and not check:
            return results

        index = 0
        alt_search = ('nzbs_org' == self.get_id())
        do_search_alt = False

@@ -812,6 +846,9 @@ ...

        urls = []
        while index < len(search_terms):
            if self.should_skip(log_warning=False):
                break

            search_params = {'q': search_terms[index], 'maxage': sickbeard.BACKLOG_DAYS + 2}
            if alt_search:

@@ -885,8 +922,11 @@ class NewznabCache(tvcache.TVCache):
        if 4489 != sickbeard.RECENTSEARCH_FREQUENCY or self.should_update():
            n_spaces = {}
            try:
                self._checkAuth()
                (items, n_spaces) = self.provider.cache_data(needed=needed)
                check = self._checkAuth()
                if isinstance(check, bool) and not check:
                    items = None
                else:
                    (items, n_spaces) = self.provider.cache_data(needed=needed)
            except (StandardError, Exception):
                items = None

@ -53,6 +53,8 @@ class NyaaProvider(generic.TorrentProvider):
|
|||
search_url = self.urls['search'] % ((0, 2)[self.confirmed], search_string)
|
||||
|
||||
html = self.get_url(search_url)
|
||||
if self.should_skip():
|
||||
return results
|
||||
|
||||
cnt = len(items[mode])
|
||||
try:
|
||||
|
|
|
@@ -100,9 +100,12 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
         result = None
         if url and False is self._init_api():
             data = self.get_url(url, timeout=90)
+            if self.should_skip():
+                return result
             if data:
                 if re.search('(?i)limit.*?reached', data):
                     logger.log('Daily Nzb Download limit reached', logger.DEBUG)
+                    self.tmr_limit_update('1', 'h', 'Your 24 hour limit of 10 NZBs has been reached')
                     self.log_failure_url(url)
                 elif '</nzb>' not in data or 'seem to be logged in' in data:
                     logger.log('Failed nzb data response: %s' % data, logger.DEBUG)
                 else:
@@ -138,6 +141,9 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
 
     def cache_data(self, needed=neededQualities(need_all=True), **kwargs):
 
+        if self.should_skip():
+            return []
+
         api_key = self._init_api()
         if False is api_key:
             return self.search_html(needed=needed, **kwargs)
@@ -153,6 +159,8 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
             url = self.urls['cache'] % urllib.urlencode(params)
 
             response = self.get_url(url)
+            if self.should_skip():
+                return results
 
             data = feedparser.parse(response.replace('<xml', '<?xml').replace('>\n<info>', '?>\n<feed>\n<info>')
                                     .replace('<search_req>\n', '').replace('</search_req>\n', '')
@@ -183,6 +191,8 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
             search_url = self.urls['search'] % urllib.urlencode(params)
 
             data_json = self.get_url(search_url, json=True)
+            if self.should_skip():
+                return results
             if data_json and self._check_auth_from_data(data_json, is_xml=False):
                 for item in data_json:
                     if 'release' in item and 'getnzb' in item:
@@ -211,6 +221,8 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
         mode = ('search', 'cache')['' == search]
         search_url = self.urls[mode + '_html'] % search
         html = self.get_url(search_url)
+        if self.should_skip():
+            return results
         cnt = len(results)
         try:
             if not html:
@@ -254,6 +266,8 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
 
         search_terms = ['.PROPER.', '.REPACK.', '.REAL.']
         results = []
+        if self.should_skip():
+            return results
 
         for term in search_terms:
             for item in self._search_provider(term, search_mode='Propers', retention=4):
@@ -272,6 +286,9 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
 
     def _init_api(self):
 
+        if self.should_skip():
+            return None
+
         try:
             api_key = self._check_auth()
             if not api_key.startswith('cookie:'):

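The `tmr_limit_update('1', 'h', ...)` call above records a Too Many Requests style bench alongside the failure log. A rough sketch of how such a helper could work; the signature mirrors the call in the hunk, while the parsing and attribute names here are assumptions:

```python
import datetime


class TmrLimitMixin(object):
    """Illustrative sketch of a 'Too Many Requests' bench."""
    tmr_limit_time = None
    tmr_limit_desc = None

    def tmr_limit_update(self, period, unit, description=None):
        # ('1', 'h') -> bench for 1 hour; ('30', 'm') -> 30 minutes, etc.
        span = {'m': 'minutes', 'h': 'hours', 'd': 'days'}[unit]
        self.tmr_limit_time = datetime.datetime.now() + datetime.timedelta(**{span: int(period)})
        self.tmr_limit_desc = description

    def tmr_limit_active(self):
        return None is not self.tmr_limit_time and datetime.datetime.now() < self.tmr_limit_time
```

A `should_skip()` implementation would then consult `tmr_limit_active()` in addition to the ordinary failure count.
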
@@ -59,6 +59,8 @@ class PiSexyProvider(generic.TorrentProvider):
                 search_url = self.urls['search'] % search_string
 
                 html = self.get_url(search_url)
+                if self.should_skip():
+                    return results
 
                 cnt = len(items[mode])
                 try:

@@ -94,6 +94,8 @@ class PotUKProvider(generic.TorrentProvider):
                     params.setdefault(name, value)
                 del params['doprefs']
                 html = self.get_url(search_url, post_data=params)
+                if self.should_skip():
+                    return results
 
                 cnt = len(items[mode])
                 try:
@@ -135,6 +137,9 @@ class PotUKProvider(generic.TorrentProvider):
     def get_data(self, url):
         result = None
         html = self.get_url(url, timeout=90)
+        if self.should_skip():
+            return result
+
         try:
             result = self._link(re.findall('(?i)"(attachment\.php[^"]+?)"', html)[0])
         except IndexError:

@@ -16,7 +16,6 @@
 # along with SickGear. If not, see <http://www.gnu.org/licenses/>.
 
 from . import generic
-from sickbeard.rssfeeds import RSSFeeds
 from lib.unidecode import unidecode
 
 
@@ -52,7 +51,7 @@ class PreToMeProvider(generic.TorrentProvider):
                 search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
                 search_url = url + (self.urls['search'] % search_string, '')['Cache' == mode]
 
-                xml_data = RSSFeeds(self).get_feed(search_url)
+                xml_data = self.cache.get_rss(search_url)
 
                 cnt = len(items[mode])
                 if xml_data and 'entries' in xml_data:

@@ -97,6 +97,8 @@ class PrivateHDProvider(generic.TorrentProvider):
                     '+'.join(search_string.split()), self._categories_string(mode, ''))
 
                 html = self.get_url(search_url)
+                if self.should_skip():
+                    return results
 
                 cnt = len(items[mode])
                 try:

@@ -85,11 +85,16 @@ class PTFProvider(generic.TorrentProvider):
 
                 search_url = self.urls['search'] % ('+'.join(search_string.split()), self._categories_string(mode))
                 html = self.get_url(search_url)
+                if self.should_skip():
+                    return results
+
                 time.sleep(2)
                 if not self.has_all_cookies(['session_key']):
                     if not self._authorised():
                         return results
                     html = self.get_url(search_url)
+                    if self.should_skip():
+                        return results
 
                 cnt = len(items[mode])
                 try:

@@ -58,8 +58,8 @@ class RarbgProvider(generic.TorrentProvider):
             return True
 
         for r in range(0, 3):
-            response = helpers.getURL(self.urls['api_token'], session=self.session, json=True)
-            if response and 'token' in response:
+            response = self.get_url(self.urls['api_token'], json=True)
+            if not self.should_skip() and response and 'token' in response:
                 self.token = response['token']
                 self.token_expiry = datetime.datetime.now() + datetime.timedelta(minutes=14)
                 return True
@@ -125,6 +125,8 @@ class RarbgProvider(generic.TorrentProvider):
             searched_url = search_url % {'r': int(self.confirmed), 't': self.token}
 
             data_json = self.get_url(searched_url, json=True)
+            if self.should_skip():
+                return results
 
             self.token_expiry = datetime.datetime.now() + datetime.timedelta(minutes=14)
             self.request_throttle = datetime.datetime.now() + datetime.timedelta(seconds=3)

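The rarbg hunks also show its token handling: a token is fetched at most once per window and its 14 minute expiry is refreshed after every successful request. The same caching idea in isolation; the class and callable names are illustrative:

```python
import datetime


class TokenCache(object):
    """Cache a short-lived API token and refresh it on demand."""
    TTL = datetime.timedelta(minutes=14)

    def __init__(self, fetch):
        self.fetch = fetch      # callable returning a fresh token string
        self.token = None
        self.expiry = None

    def get_token(self):
        now = datetime.datetime.now()
        if None is self.token or None is self.expiry or now >= self.expiry:
            self.token = self.fetch()   # e.g. a hit on the api_token endpoint
            self.expiry = now + self.TTL
        return self.token
```
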
@@ -63,6 +63,8 @@ class RevTTProvider(generic.TorrentProvider):
 
                 html = self.get_url(self.urls['search'] % ('+'.join(search_string.split()),
                                                            self._categories_string(mode)))
+                if self.should_skip():
+                    return results
 
                 cnt = len(items[mode])
                 try:

@@ -21,7 +21,6 @@ from . import generic
 from sickbeard import logger, tvcache
 from sickbeard.helpers import tryInt
 from sickbeard.exceptions import ex
-from sickbeard.rssfeeds import RSSFeeds
 from lib.bencode import bdecode
 
 
@@ -41,8 +40,6 @@ class TorrentRssProvider(generic.TorrentProvider):
         self.search_mode = search_mode
         self.search_fallback = bool(tryInt(search_fallback))
 
-        self.feeder = RSSFeeds(self)
-
     def image_name(self):
 
         return generic.GenericProvider.image_name(self, 'torrentrss')
 
@@ -102,6 +99,9 @@ class TorrentRssProvider(generic.TorrentProvider):
                     break
                 else:
                     torrent_file = self.get_url(url)
+                    if self.should_skip():
+                        break
+
                     try:
                         bdecode(torrent_file)
                         break
@@ -120,7 +120,7 @@ class TorrentRssProvider(generic.TorrentProvider):
 
         result = []
         for mode in search_params.keys():
-            data = self.feeder.get_feed(self.url)
+            data = self.cache.get_rss(self.url)
 
             result += (data and 'entries' in data) and data.entries or []

@@ -61,6 +61,8 @@ class SceneHDProvider(generic.TorrentProvider):
                 search_url = self.urls['search'] % (search_string, self._categories_string(mode, '%s', ','))
 
                 html = self.get_url(search_url, timeout=90)
+                if self.should_skip():
+                    return results
 
                 cnt = len(items[mode])
                 try:

@@ -80,6 +80,8 @@ class SceneTimeProvider(generic.TorrentProvider):
 
                 self.session.headers.update({'Referer': self.url + 'browse.php', 'X-Requested-With': 'XMLHttpRequest'})
                 html = self.get_url(self.urls['browse'], post_data=post_data)
+                if self.should_skip():
+                    return results
 
                 cnt = len(items[mode])
                 try:

@@ -49,8 +49,8 @@ class ShazbatProvider(generic.TorrentProvider):
     def _authorised(self, **kwargs):
 
         return super(ShazbatProvider, self)._authorised(
-            logged_in=(lambda y=None: '<input type="password"' not in helpers.getURL(
-                self.urls['feeds'], session=self.session)), post_params={'tv_login': self.username, 'form_tmpl': True})
+            logged_in=(lambda y=None: '<input type="password"' not in self.get_url(self.urls['feeds'], skip_auth=True)),
+            post_params={'tv_login': self.username, 'form_tmpl': True})
 
     def _search_provider(self, search_params, **kwargs):
 
@@ -70,11 +70,16 @@ class ShazbatProvider(generic.TorrentProvider):
                 if 'Cache' == mode:
                     search_url = self.urls['browse']
                     html = self.get_url(search_url)
+                    if self.should_skip():
+                        return results
                 else:
                     search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
                     search_string = search_string.replace(show_detail, '').strip()
                     search_url = self.urls['search'] % search_string
                     html = self.get_url(search_url)
+                    if self.should_skip():
+                        return results
+
                 shows = rc['show_id'].findall(html)
                 if not any(shows):
                     continue
@@ -85,6 +90,8 @@ class ShazbatProvider(generic.TorrentProvider):
                         continue
                     html and time.sleep(1.1)
                     html += self.get_url(self.urls['show'] % sid)
+                    if self.should_skip():
+                        return results
 
                 cnt = len(items[mode])
                 try:

@@ -56,6 +56,8 @@ class SkytorrentsProvider(generic.TorrentProvider):
                 search_url = self.urls['search'] % search_string
 
                 html = self.get_url(search_url)
+                if self.should_skip():
+                    return results
 
                 cnt = len(items[mode])
                 try:

@@ -67,6 +67,8 @@ class SpeedCDProvider(generic.TorrentProvider):
                               jxt=2, jxw='b', freeleech=('on', None)[not self.freeleech])
 
                 data_json = self.get_url(self.urls['search'], post_data=post_data, json=True)
+                if self.should_skip():
+                    return results
 
                 cnt = len(items[mode])
                 try:

@@ -106,7 +106,7 @@ class ThePirateBayProvider(generic.TorrentProvider):
         quality = Quality.UNKNOWN
         file_name = None
         data = self.get_url('%sajax_details_filelist.php?id=%s' % (self.url, torrent_id))
-        if not data:
+        if self.should_skip() or not data:
             return None
 
         files_list = re.findall('<td.+>(.*?)</td>', data)
@@ -193,6 +193,8 @@ class ThePirateBayProvider(generic.TorrentProvider):
                 search_url = self.urls['browse'] if 'Cache' == mode \
                     else self.urls['search'] % (urllib.quote(search_string))
                 html = self.get_url(search_url)
+                if self.should_skip():
+                    return results
 
                 cnt = len(items[mode])
                 try:

@@ -49,6 +49,9 @@ class TokyoToshokanProvider(generic.TorrentProvider):
                 'stats': 'S:\s*?(\d)+\s*L:\s*(\d+)', 'size': 'size:\s*(\d+[.,]\d+\w+)'}.iteritems())
 
             html = self.get_url(search_url)
+            if self.should_skip():
+                return self._sort_seeding(mode, results)
+
             if html:
                 try:
                     with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
@@ -103,7 +106,7 @@ class TokyoToshokanCache(tvcache.TVCache):
 
         mode = 'Cache'
         search_url = '%srss.php?%s' % (self.provider.url, urllib.urlencode({'filter': '1'}))
-        data = self.getRSSFeed(search_url)
+        data = self.get_rss(search_url)
 
         results = []
         if data and 'entries' in data:

@@ -74,6 +74,8 @@ class TorLockProvider(generic.TorrentProvider):
                     else self.urls['search'] % (urllib.quote_plus(search_string).replace('+', '-'))
 
                 html = self.get_url(search_url)
+                if self.should_skip():
+                    return results
 
                 cnt = len(items[mode])
                 try:

@@ -36,7 +36,7 @@ class TorrentBytesProvider(generic.TorrentProvider):
         self.url_tmpl = {'config_provider_home_uri': '%(home)s', 'login_action': '%(home)s%(vars)s',
                          'search': '%(home)s%(vars)s'}
 
-        self.categories = {'Season': [41, 32], 'Episode': [33, 37, 38]}
+        self.categories = {'Season': [41], 'Episode': [32, 33, 37, 38]}
         self.categories['Cache'] = self.categories['Season'] + self.categories['Episode']
 
         self.username, self.password, self.freeleech, self.minseed, self.minleech = 5 * [None]
@@ -61,6 +61,8 @@ class TorrentBytesProvider(generic.TorrentProvider):
                 search_url = self.urls['search'] % (search_string, self._categories_string(mode))
 
                 html = self.get_url(search_url, timeout=90)
+                if self.should_skip():
+                    return results
 
                 cnt = len(items[mode])
                 try:

@@ -86,6 +86,8 @@ class TorrentDayProvider(generic.TorrentProvider):
                     search_string, ('&sort=7&type=desc', '')['Cache' == mode])
 
                 html = self.get_url(search_url)
+                if self.should_skip():
+                    return results
 
                 cnt = len(items[mode])
                 try:

@@ -69,6 +69,8 @@ class TorrentingProvider(generic.TorrentProvider):
                 search_url = self.urls['search'] % (self._categories_string(), search_string)
 
                 html = self.get_url(search_url)
+                if self.should_skip():
+                    return results
 
                 cnt = len(items[mode])
                 try:

@@ -62,6 +62,8 @@ class TorrentLeechProvider(generic.TorrentProvider):
                     'query': isinstance(search_string, unicode) and unidecode(search_string) or search_string}
 
                 html = self.get_url(search_url)
+                if self.should_skip():
+                    return results
 
                 cnt = len(items[mode])
                 try:

@@ -93,6 +93,8 @@ class Torrentz2Provider(generic.TorrentProvider):
                     'tv%s' % ('+' + quote_plus(search_string), '')['Cache' == mode])
 
                 html = self.get_url(search_url)
+                if self.should_skip():
+                    return results
 
                 cnt = len(items[mode])
                 try:

@@ -66,6 +66,8 @@ class TVChaosUKProvider(generic.TorrentProvider):
                     'order': 'desc', 'daysprune': '-1'})
 
                 html = self.get_url(self.urls['search'], **kwargs)
+                if self.should_skip():
+                    return results
 
                 cnt = len(items[mode])
                 try:

@@ -70,6 +70,8 @@ class WOPProvider(generic.TorrentProvider):
                 search_url = self.urls['search'] % (search_string, self._categories_string(mode, 'cats2[]=%s'))
 
                 html = self.get_url(search_url, timeout=90)
+                if self.should_skip():
+                    return results
 
                 cnt = len(items[mode])
                 try:

@@ -58,6 +58,8 @@ class ZooqleProvider(generic.TorrentProvider):
                 search_url = self.urls['search'] % (search_string, self._categories_string(mode, '', ','))
 
                 html = self.get_url(search_url)
+                if self.should_skip():
+                    return results
 
                 cnt = len(items[mode])
                 try:

@@ -5,54 +5,32 @@
 
 import feedparser
 
-from sickbeard import helpers, logger
+from sickbeard import logger
 from sickbeard.exceptions import ex
 
 
 class RSSFeeds:
 
     def __init__(self, provider=None):
 
         self.provider = provider
-        self.response = None
 
-    def _check_auth_cookie(self):
+    def get_feed(self, url, **kwargs):
 
-        if self.provider:
-            return self.provider.check_auth_cookie()
-        return True
+        if self.provider and self.provider.check_auth_cookie():
+            response = self.provider.get_url(url, **kwargs)
+            if not self.provider.should_skip() and response:
+                try:
+                    data = feedparser.parse(response)
+                    data['rq_response'] = self.provider.session.response
+                    if data and 'entries' in data:
+                        return data
 
-    # noinspection PyUnusedLocal
-    def cb_response(self, r, *args, **kwargs):
-        self.response = dict(url=r.url, elapsed=r.elapsed, from_cache=r.from_cache)
-        return r
+                    if data and 'error' in data.feed:
+                        err_code = data.feed['error']['code']
+                        err_desc = data.feed['error']['description']
+                        logger.log(u'RSS error:[%s] code:[%s]' % (err_desc, err_code), logger.DEBUG)
+                    else:
+                        logger.log(u'RSS error loading url: ' + url, logger.DEBUG)
 
-    def get_feed(self, url, request_headers=None, **kwargs):
-
-        if not self._check_auth_cookie():
-            return
-
-        session = None
-        if self.provider and hasattr(self.provider, 'session'):
-            session = self.provider.session
-
-        response = helpers.getURL(url, headers=request_headers, session=session,
-                                  hooks=dict(response=self.cb_response), **kwargs)
-        if not response:
-            return
-
-        try:
-            feed = feedparser.parse(response)
-            feed['rq_response'] = self.response
-            if feed and 'entries' in feed:
-                return feed
-
-            if feed and 'error' in feed.feed:
-                err_code = feed.feed['error']['code']
-                err_desc = feed.feed['error']['description']
-                logger.log(u'RSS ERROR:[%s] CODE:[%s]' % (err_desc, err_code), logger.DEBUG)
-            else:
-                logger.log(u'RSS error loading url: ' + url, logger.DEBUG)
-
-        except Exception as e:
-            logger.log(u'RSS error: ' + ex(e), logger.DEBUG)
+                except Exception as e:
+                    logger.log(u'RSS error: ' + ex(e), logger.DEBUG)

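With getRSSFeed renamed to get_rss on TVCache, providers now fetch feeds as self.cache.get_rss(url), and the object handed back is a plain feedparser result. A stand-alone illustration of the structure the callers above rely on (the URL is a placeholder):

```python
import feedparser

# get_feed()/get_rss() hand back a feedparser result; callers only
# inspect 'entries', so this mirrors the checks in the hunks above
data = feedparser.parse('https://example.org/rss')  # placeholder URL
if data and 'entries' in data:
    for entry in data.entries:
        print(entry.get('title'), entry.get('link'))
```
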
@@ -27,7 +27,7 @@ from sickbeard.exceptions import ex
 
 class Scheduler(threading.Thread):
     def __init__(self, action, cycleTime=datetime.timedelta(minutes=10), run_delay=datetime.timedelta(minutes=0),
-                 start_time=None, threadName="ScheduledThread", silent=True, prevent_cycle_run=None):
+                 start_time=None, threadName="ScheduledThread", silent=True, prevent_cycle_run=None, paused=False):
         super(Scheduler, self).__init__()
 
         self.lastRun = datetime.datetime.now() + run_delay - cycleTime
@@ -38,10 +38,32 @@ class Scheduler(threading.Thread):
 
         self.name = threadName
         self.silent = silent
-        self.stop = threading.Event()
+        self._stop = threading.Event()
+        self._unpause = threading.Event()
+        if not paused:
+            self._unpause.set()
         self.lock = threading.Lock()
         self.force = False
 
+    def pause(self):
+        self._unpause.clear()
+
+    def unpause(self):
+        self._unpause.set()
+
+    def stop(self):
+        self._stop.set()
+        self.unpause()
+
+    def check_paused(self):
+        if hasattr(self.action, 'check_paused'):
+            if self.action.check_paused():
+                self.pause()
+                self.silent = True
+            else:
+                self.unpause()
+                self.silent = False
+
     def timeLeft(self):
         return self.cycleTime - (datetime.datetime.now() - self.lastRun)
 
@@ -52,8 +74,10 @@ class Scheduler(threading.Thread):
         return False
 
     def run(self):
+        self.check_paused()
 
-        while not self.stop.is_set():
+        # if self._unpause Event() is NOT set the loop pauses
+        while self._unpause.wait() and not self._stop.is_set():
 
             try:
                 current_time = datetime.datetime.now()
@@ -100,4 +124,5 @@ class Scheduler(threading.Thread):
             time.sleep(1)
 
         # exiting thread
-        self.stop.clear()
+        self._stop.clear()
+        self._unpause.clear()

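The two Event objects give the scheduler a cheap pause switch: the loop blocks inside `_unpause.wait()` while paused, yet still honours `_stop`. A stripped-down, runnable sketch of the same pattern (the worker class and its sleep body are illustrative):

```python
import threading
import time


class PausableWorker(threading.Thread):
    def __init__(self):
        super(PausableWorker, self).__init__()
        self._stop_evt = threading.Event()
        self._unpause = threading.Event()
        self._unpause.set()  # start unpaused

    def pause(self):
        self._unpause.clear()

    def unpause(self):
        self._unpause.set()

    def stop(self):
        self._stop_evt.set()
        self.unpause()  # wake the loop so it can observe the stop flag

    def run(self):
        # wait() blocks while paused; the stop check runs once per wake-up
        while self._unpause.wait() and not self._stop_evt.is_set():
            time.sleep(1)  # one unit of scheduled work goes here
```

Note how `stop()` also sets the unpause event, exactly as in the hunk above, so a paused thread can still exit cleanly.
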
@@ -143,7 +143,7 @@ def snatch_episode(result, end_status=SNATCHED):
         # make sure we have the torrent file content
         if not result.content and not result.url.startswith('magnet'):
             result.content = result.provider.get_url(result.url)
-            if not result.content:
+            if result.provider.should_skip() or not result.content:
                 logger.log(u'Torrent content failed to download from %s' % result.url, logger.ERROR)
                 return False
         # Snatches torrent with client
@@ -465,11 +465,18 @@ def search_for_needed_episodes(episodes):
             best_result.content = None
             if not best_result.url.startswith('magnet'):
                 best_result.content = best_result.provider.get_url(best_result.url)
+                if best_result.provider.should_skip():
+                    break
                 if not best_result.content:
                     continue
 
             found_results[cur_ep] = best_result
 
+        try:
+            cur_provider.save_list()
+        except (StandardError, Exception):
+            pass
+
     threading.currentThread().name = orig_thread_name
 
     if not len(providers):

@@ -26,8 +26,12 @@ class ProperSearcher:
     def __init__(self):
         self.lock = threading.Lock()
         self.amActive = False
         self.search_intervals = [('daily', '24 hours', 24 * 60), ('4h', '4 hours', 4 * 60),
                                  ('90m', '90 mins', 90), ('45m', '45 mins', 45), ('15m', '15 mins', 15)]
 
+    @staticmethod
+    def check_paused():
+        if sickbeard.DOWNLOAD_PROPERS:
+            return False
+        return True
+
     def run(self):

@@ -21,11 +21,14 @@ from __future__ import with_statement
 
 import traceback
 import threading
+import datetime
+import re
 
 import sickbeard
 from sickbeard import db, logger, common, exceptions, helpers, network_timezones, generic_queue, search, \
     failed_history, history, ui, properFinder
 from sickbeard.search import wanted_episodes, get_aired_in_season, set_wanted_aired
+from sickbeard.classes import Proper
+from sickbeard.indexers.indexer_config import INDEXER_TVDB
 
 
 search_queue_lock = threading.Lock()
@@ -109,7 +112,11 @@ class SearchQueue(generic_queue.GenericQueue):
         return self._is_in_progress(RecentSearchQueueItem)
 
     def is_propersearch_in_progress(self):
-        return self._is_in_progress(ProperSearchQueueItem)
+        with self.lock:
+            for cur_item in self.queue + [self.currentItem]:
+                if isinstance(cur_item, ProperSearchQueueItem) and None is cur_item.propers:
+                    return True
+            return False
 
     def is_standard_backlog_in_progress(self):
         with self.lock:
@@ -141,25 +148,25 @@ class SearchQueue(generic_queue.GenericQueue):
         return message
 
     def queue_length(self):
-        length = {'backlog': [], 'recent': 0, 'manual': [], 'failed': [], 'proper': 0}
+        length = {'backlog': [], 'recent': 0, 'manual': [], 'failed': [], 'proper': []}
         with self.lock:
             for cur_item in [self.currentItem] + self.queue:
                 if isinstance(cur_item, RecentSearchQueueItem):
                     length['recent'] += 1
                 elif isinstance(cur_item, BacklogQueueItem):
-                    length['backlog'].append({'indexerid': cur_item.show.indexerid, 'indexer': cur_item.show.indexer,
-                                              'name': cur_item.show.name, 'segment': cur_item.segment,
-                                              'standard_backlog': cur_item.standard_backlog,
-                                              'limited_backlog': cur_item.limited_backlog, 'forced': cur_item.forced,
-                                              'torrent_only': cur_item.torrent_only})
+                    length['backlog'] += [dict(indexerid=cur_item.show.indexerid, indexer=cur_item.show.indexer,
+                                               name=cur_item.show.name, segment=cur_item.segment,
+                                               standard_backlog=cur_item.standard_backlog,
+                                               limited_backlog=cur_item.limited_backlog, forced=cur_item.forced,
+                                               torrent_only=cur_item.torrent_only)]
                 elif isinstance(cur_item, ProperSearchQueueItem):
-                    length['proper'] += 1
+                    length['proper'] += [dict(recent=None is not cur_item.propers)]
                 elif isinstance(cur_item, ManualSearchQueueItem):
-                    length['manual'].append({'indexerid': cur_item.show.indexerid, 'indexer': cur_item.show.indexer,
-                                             'name': cur_item.show.name, 'segment': cur_item.segment})
+                    length['manual'] += [dict(indexerid=cur_item.show.indexerid, indexer=cur_item.show.indexer,
+                                              name=cur_item.show.name, segment=cur_item.segment)]
                 elif isinstance(cur_item, FailedQueueItem):
-                    length['failed'].append({'indexerid': cur_item.show.indexerid, 'indexer': cur_item.show.indexer,
-                                             'name': cur_item.show.name, 'segment': cur_item.segment})
+                    length['failed'] += [dict(indexerid=cur_item.show.indexerid, indexer=cur_item.show.indexer,
+                                              name=cur_item.show.name, segment=cur_item.segment)]
         return length
 
     def add_item(self, item):
@@ -210,7 +217,11 @@ class RecentSearchQueueItem(generic_queue.QueueItem):
 
             self.episodes.extend(wanted_eps)
 
+            if sickbeard.DOWNLOAD_PROPERS:
+                properFinder.get_needed_qualites(needed)
+
             self.update_providers(needed=needed)
+            self._check_for_propers(needed)
 
             if not self.episodes:
                 logger.log(u'No search of cache for episodes required')
@@ -244,6 +255,33 @@ class RecentSearchQueueItem(generic_queue.QueueItem):
         finally:
             self.finish()
 
+    @staticmethod
+    def _check_for_propers(needed):
+        if not sickbeard.DOWNLOAD_PROPERS:
+            return
+
+        propers = {}
+        my_db = db.DBConnection('cache.db')
+        sql_results = my_db.select('SELECT * FROM provider_cache')
+        re_p = (r'\brepack|proper|real\b', r'\brepack|proper|real|v[1-5]\b')[needed.need_anime]
+
+        proper_regex = re.compile(re_p, flags=re.I)
+
+        for s in sql_results:
+            if proper_regex.search(s['name']):
+                try:
+                    show = helpers.find_show_by_id(sickbeard.showList, {INDEXER_TVDB: int(s['indexerid'])})
+                except (StandardError, Exception):
+                    continue
+                if show:
+                    propers.setdefault(s['provider'], []).append(
+                        Proper(s['name'], s['url'], datetime.datetime.fromtimestamp(s['time']), show, parsed_show=show))
+
+        if propers:
+            logger.log('Found Proper/Repack/Real in recent search, sending data to properfinder')
+            propersearch_queue_item = sickbeard.search_queue.ProperSearchQueueItem(propers=propers)
+            sickbeard.searchQueueScheduler.action.add_item(propersearch_queue_item)
+
     @staticmethod
     def _change_missing_episodes():
         if not network_timezones.network_dict:
@@ -326,7 +364,8 @@ class RecentSearchQueueItem(generic_queue.QueueItem):
             threads[-1].start()
 
         if not len(providers):
-            logger.log('No NZB/Torrent providers in Media Providers/Options are enabled to match recent episodes', logger.WARNING)
+            logger.log('No NZB/Torrent providers in Media Providers/Options are enabled to match recent episodes',
+                       logger.WARNING)
 
         if threads:
             # wait for all threads to finish
@@ -337,16 +376,17 @@ class RecentSearchQueueItem(generic_queue.QueueItem):
 
 
 class ProperSearchQueueItem(generic_queue.QueueItem):
-    def __init__(self):
+    def __init__(self, propers=None):
         generic_queue.QueueItem.__init__(self, 'Proper Search', PROPER_SEARCH)
-        self.priority = generic_queue.QueuePriorities.HIGH
+        self.priority = (generic_queue.QueuePriorities.VERYHIGH, generic_queue.QueuePriorities.HIGH)[None is propers]
+        self.propers = propers
         self.success = None
 
     def run(self):
         generic_queue.QueueItem.run(self)
 
         try:
-            properFinder.search_propers()
+            properFinder.search_propers(self.propers)
         finally:
             self.finish()

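_check_for_propers() filters cached release names with the alternation `\brepack|proper|real\b` (the anime branch additionally accepts `v[1-5]` revision tags). A quick runnable check of what the non-anime pattern matches:

```python
import re

# same pattern as the non-anime branch of re_p above
proper_regex = re.compile(r'\brepack|proper|real\b', flags=re.I)

names = ['Show.S01E02.PROPER.720p.HDTV.x264',
         'Show.S01E02.REPACK.720p.HDTV.x264',
         'Show.S01E02.720p.HDTV.x264']
print([bool(proper_regex.search(n)) for n in names])  # [True, True, False]
```
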
@@ -20,7 +20,7 @@ import datetime
 import traceback
 
 import sickbeard
-from sickbeard import logger, exceptions, ui, db, network_timezones, failed_history
+from sickbeard import logger, exceptions, ui, db, network_timezones, failed_history, properFinder
 from sickbeard.exceptions import ex
 
 
@@ -43,6 +43,12 @@ class ShowUpdater:
             logger.log('network timezone update error', logger.ERROR)
             logger.log(traceback.format_exc(), logger.ERROR)
 
+        # refresh webdl types
+        try:
+            properFinder.load_webdl_types()
+        except (StandardError, Exception):
+            logger.log('error loading webdl_types', logger.DEBUG)
+
         # update xem id lists
         try:
             sickbeard.scene_exceptions.get_xem_ids()

@@ -86,6 +86,12 @@ class SubtitlesFinder():
     The SubtitlesFinder will be executed every hour but will not necessarly search
     and download subtitles. Only if the defined rule is true
     """
+    @staticmethod
+    def check_paused():
+        if sickbeard.USE_SUBTITLES:
+            return False
+        return True
+
     def run(self, force=False):
         if len(sickbeard.subtitles.getEnabledServiceList()) < 1:
             logger.log(u'Not enough services selected. At least 1 service is required to search subtitles in the background', logger.ERROR)

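ProperSearcher and SubtitlesFinder now expose the same check_paused() hook that Scheduler.check_paused() polls, so toggling DOWNLOAD_PROPERS or USE_SUBTITLES simply pauses or resumes the owning thread. Illustrative wiring only, assuming the Scheduler and ProperSearcher shown in the hunks above (the cycle time and thread name are arbitrary):

```python
import datetime

searcher = ProperSearcher()
scheduler = Scheduler(searcher, cycleTime=datetime.timedelta(hours=1),
                      threadName='PROPERFINDER', paused=searcher.check_paused())
scheduler.start()

# later, after the user flips the relevant config switch:
scheduler.check_paused()  # pauses or unpauses the thread to match the setting
```
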
@@ -107,7 +107,7 @@ class TVCache:
 
         return []
 
-    def getRSSFeed(self, url, **kwargs):
+    def get_rss(self, url, **kwargs):
         return RSSFeeds(self.provider).get_feed(url, **kwargs)
 
     def _translateTitle(self, title):

@@ -602,6 +602,40 @@ class MainHandler(WebHandler):
 
         sickbeard.save_config()
 
+    @staticmethod
+    def getFooterTime(change_layout=True, json_dump=True, *args, **kwargs):
+
+        now = datetime.datetime.now()
+        events = [
+            ('recent', sickbeard.recentSearchScheduler.timeLeft),
+            ('backlog', sickbeard.backlogSearchScheduler.next_backlog_timeleft),
+        ]
+
+        if sickbeard.DOWNLOAD_PROPERS:
+            events += [('propers', sickbeard.properFinder.next_proper_timeleft)]
+
+        if change_layout not in (False, 0, '0', '', None):
+            sickbeard.FOOTER_TIME_LAYOUT += 1
+            if sickbeard.FOOTER_TIME_LAYOUT == 2:  # 2 layouts = time + delta
+                sickbeard.FOOTER_TIME_LAYOUT = 0
+            sickbeard.save_config()
+
+        next_event = []
+        for k, v in events:
+            try:
+                t = v()
+            except AttributeError:
+                t = None
+            if 0 == sickbeard.FOOTER_TIME_LAYOUT:
+                next_event += [{k + '_time': t and sbdatetime.sbdatetime.sbftime(now + t, markup=True) or 'soon'}]
+            else:
+                next_event += [{k + '_timeleft': t and str(t).split('.')[0] or 'soon'}]
+
+        if json_dump not in (False, 0, '0', '', None):
+            next_event = json.dumps(next_event)
+
+        return next_event
+
     def toggleDisplayShowSpecials(self, show):
 
         sickbeard.DISPLAY_SHOW_SPECIALS = not sickbeard.DISPLAY_SHOW_SPECIALS

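In the delta layout, getFooterTime trims each timedelta with `str(t).split('.')[0]`, which simply drops the microseconds part:

```python
import datetime

t = datetime.timedelta(hours=2, minutes=5, seconds=3, microseconds=123456)
print(str(t))                # 2:05:03.123456
print(str(t).split('.')[0])  # 2:05:03  <- what the footer shows in delta layout
```
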
@@ -1446,7 +1480,7 @@ class Home(MainHandler):
 
         indexerid = int(showObj.indexerid)
         indexer = int(showObj.indexer)
-        t.min_initial = Quality.qualityStrings[min(Quality.splitQuality(showObj.quality)[0])]
+        t.min_initial = Quality.get_quality_ui(min(Quality.splitQuality(showObj.quality)[0]))
         t.all_scene_exceptions = showObj.exceptions
         t.scene_numbering = get_scene_numbering_for_show(indexerid, indexer)
         t.scene_absolute_numbering = get_scene_absolute_numbering_for_show(indexerid, indexer)
@@ -4531,11 +4565,27 @@ class ManageSearches(Manage):
         t.recent_search_status = sickbeard.searchQueueScheduler.action.is_recentsearch_in_progress()
         t.find_propers_status = sickbeard.searchQueueScheduler.action.is_propersearch_in_progress()
         t.queue_length = sickbeard.searchQueueScheduler.action.queue_length()
+        t.provider_fail_stats = filter(lambda stat: len(stat['fails']), [{
+            'active': p.is_active(), 'name': p.name, 'prov_id': p.get_id(), 'prov_img': p.image_name(),
+            'fails': p.fails.fails_sorted, 'tmr_limit_time': p.tmr_limit_time,
+            'next_try': p.get_next_try_time, 'has_limit': getattr(p, 'has_limit', False)}
+            for p in sickbeard.providerList + sickbeard.newznabProviderList])
+        t.provider_fails = 0 < len([p for p in t.provider_fail_stats if len(p['fails'])])
 
         t.submenu = self.ManageMenu('Search')
 
         return t.respond()
 
+    def retryProvider(self, provider=None, *args, **kwargs):
+        if not provider:
+            return
+        prov = [p for p in sickbeard.providerList + sickbeard.newznabProviderList if p.get_id() == provider]
+        if not prov:
+            return
+        prov[0].retry_next()
+        time.sleep(3)
+        return
+
     def forceVersionCheck(self, *args, **kwargs):
         # force a check to see if there is a new version
         if sickbeard.versionCheckScheduler.action.check_for_new_version(force=True):
@@ -4826,7 +4876,7 @@ class ConfigGeneral(Config):
             trash_remove_show=None, trash_rotate_logs=None, update_frequency=None, launch_browser=None, web_username=None,
             use_api=None, api_key=None, indexer_default=None, timezone_display=None, cpu_preset=None, file_logging_preset=None,
             web_password=None, version_notify=None, enable_https=None, https_cert=None, https_key=None,
-            handle_reverse_proxy=None, send_security_headers=None, home_search_focus=None, sort_article=None, auto_update=None, notify_on_update=None,
+            handle_reverse_proxy=None, send_security_headers=None, home_search_focus=None, display_freespace=None, sort_article=None, auto_update=None, notify_on_update=None,
             proxy_setting=None, proxy_indexers=None, anon_redirect=None, git_path=None, git_remote=None, calendar_unprotected=None,
             fuzzy_dating=None, trim_zero=None, date_preset=None, date_preset_na=None, time_preset=None,
             indexer_timeout=None, rootDir=None, theme_name=None, default_home=None, use_imdb_info=None,
@@ -4882,6 +4932,7 @@ class ConfigGeneral(Config):
 
         sickbeard.HOME_SEARCH_FOCUS = config.checkbox_to_value(home_search_focus)
         sickbeard.USE_IMDB_INFO = config.checkbox_to_value(use_imdb_info)
+        sickbeard.DISPLAY_FREESPACE = config.checkbox_to_value(display_freespace)
         sickbeard.SORT_ARTICLE = config.checkbox_to_value(sort_article)
         sickbeard.FUZZY_DATING = config.checkbox_to_value(fuzzy_dating)
         sickbeard.TRIM_ZERO = config.checkbox_to_value(trim_zero)
@@ -4982,7 +5033,6 @@ class ConfigSearch(Config):
                                        for show in sickbeard.showList if show.rls_require_words and
                                        show.rls_require_words.strip()]
         t.using_rls_require_words.sort(lambda x, y: cmp(x[1], y[1]), reverse=False)
-        t.propers_intervals = search_propers.ProperSearcher().search_intervals
         t.using_regex = False
         try:
             from sickbeard.name_parser.parser import regex
@@ -4996,7 +5046,7 @@ class ConfigSearch(Config):
             nzbget_category=None, nzbget_priority=None, nzbget_host=None, nzbget_use_https=None,
             backlog_days=None, backlog_frequency=None, search_unaired=None, unaired_recent_search_only=None,
             recentsearch_frequency=None, nzb_method=None, torrent_method=None, usenet_retention=None,
-            download_propers=None, propers_webdl_onegrp=None, check_propers_interval=None,
+            download_propers=None, propers_webdl_onegrp=None,
             allow_high_priority=None,
             torrent_dir=None, torrent_username=None, torrent_password=None, torrent_host=None,
             torrent_label=None, torrent_path=None, torrent_verify_cert=None,
@@ -5033,26 +5083,8 @@ class ConfigSearch(Config):
         sickbeard.IGNORE_WORDS = ignore_words if ignore_words else ''
         sickbeard.REQUIRE_WORDS = require_words if require_words else ''
 
-        sickbeard.DOWNLOAD_PROPERS = config.checkbox_to_value(download_propers)
+        config.change_DOWNLOAD_PROPERS(config.checkbox_to_value(download_propers))
         sickbeard.PROPERS_WEBDL_ONEGRP = config.checkbox_to_value(propers_webdl_onegrp)
-        if sickbeard.CHECK_PROPERS_INTERVAL != check_propers_interval:
-            sickbeard.CHECK_PROPERS_INTERVAL = check_propers_interval
-
-            if sickbeard.DOWNLOAD_PROPERS:
-                proper_sch = sickbeard.properFinderScheduler
-                item = [(k, n, v) for (k, n, v) in proper_sch.action.search_intervals if k == check_propers_interval]
-                if item and None is proper_sch.start_time:
-                    interval = datetime.timedelta(minutes=item[0][2])
-                    run_in = proper_sch.lastRun + interval - datetime.datetime.now()
-                    proper_sch.cycleTime = interval
-
-                    run_at = 'imminent'
-                    if datetime.timedelta() < run_in:
-                        hours, remainder = divmod(run_in.seconds, 3600)
-                        minutes, seconds = divmod(remainder, 60)
-                        run_at = u'in approx. ' + ('%dh, %dm' % (hours, minutes) if 0 < hours else
-                                                   '%dm, %ds' % (minutes, seconds))
-                    logger.log(u'Change search PROPERS interval, next check %s' % run_at)
 
         sickbeard.SEARCH_UNAIRED = bool(config.checkbox_to_value(search_unaired))
         sickbeard.UNAIRED_RECENT_SEARCH_ONLY = bool(config.checkbox_to_value(unaired_recent_search_only, value_off=1, value_on=0))
@@ -5126,21 +5158,11 @@ class ConfigPostProcessing(Config):
             results += ['Unable to create directory ' + os.path.normpath(tv_download_dir) + ', dir not changed.']
 
         new_val = config.checkbox_to_value(process_automatically)
-        if new_val != sickbeard.PROCESS_AUTOMATICALLY:
-            if not sickbeard.PROCESS_AUTOMATICALLY and not sickbeard.autoPostProcesserScheduler.ident:
-                try:
-                    sickbeard.autoPostProcesserScheduler.start()
-                except:
-                    pass
-            sickbeard.PROCESS_AUTOMATICALLY = new_val
+        sickbeard.PROCESS_AUTOMATICALLY = new_val
+        sickbeard.autoPostProcesserScheduler.check_paused()
 
         config.change_AUTOPOSTPROCESSER_FREQUENCY(autopostprocesser_frequency)
 
-        if sickbeard.PROCESS_AUTOMATICALLY:
-            sickbeard.autoPostProcesserScheduler.silent = False
-        else:
-            sickbeard.autoPostProcesserScheduler.silent = True
-
         if unpack:
             if self.isRarSupported() != 'not supported':
                 sickbeard.UNPACK = config.checkbox_to_value(unpack)

@@ -20,6 +20,7 @@
 from __future__ import print_function
 import unittest
 import test_lib as test
+from sickbeard import cache_db, mainDB, failed_db
 
 
 class DBBasicTests(test.SickbeardTestDBCase):
 
@@ -28,9 +29,16 @@ class DBBasicTests(test.SickbeardTestDBCase):
         super(DBBasicTests, self).setUp()
         self.db = test.db.DBConnection()
 
+    def is_testdb(self, version):
+        if isinstance(version, (int, long)):
+            return 100000 <= version
+
     def test_select(self):
         self.db.select('SELECT * FROM tv_episodes WHERE showid = ? AND location != ""', [0000])
         self.db.close()
+        self.assertEqual(cache_db.TEST_BASE_VERSION is not None, self.is_testdb(cache_db.MAX_DB_VERSION))
+        self.assertEqual(mainDB.TEST_BASE_VERSION is not None, self.is_testdb(mainDB.MAX_DB_VERSION))
+        self.assertEqual(failed_db.TEST_BASE_VERSION is not None, self.is_testdb(failed_db.MAX_DB_VERSION))
 
 if __name__ == '__main__':
     print('==================')

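The new assertions encode a convention: whenever a db module defines a TEST_BASE_VERSION, its MAX_DB_VERSION is lifted into the 100000+ range so test schema versions can never be mistaken for production ones. Sketched with illustrative version numbers:

```python
# illustrative values only; each real db module defines its own
TEST_BASE_VERSION = 20007        # production schema the test versions build on
MAX_DB_VERSION = 100000 + 3      # test schema versions live at 100000 and above


def is_testdb(version):
    # 'long' appears because the codebase runs on Python 2
    if isinstance(version, (int, long)):
        return 100000 <= version
```
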