Merge pull request #1047 from JackDandy/feature/ChangeIntegrateProperIntoRecent

Change integrate proper search into recent search
commit 233739a58c by JackDandy, 2018-01-27 17:22:21 +00:00 (committed by GitHub)
17 changed files with 414 additions and 156 deletions

View file

@ -8,6 +8,9 @@
* Add failure handling, skip provider for x hour(s) depending on count of failures
* Add detection of Too Many Requests (supporting providers UC and BTN)
* Add footer icon button to switch time layouts
* Add performance gains for proper search by integrating it into recent search
* Add the once per day proper finder time to footer; this process catches any propers missed during recent searches
* Add ability to differentiate webdl/rip sources so overwriting propers is always done from the same source (e.g. AMZN)
[develop changelog]
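
The webdl/rip differentiation works by matching named source patterns against a release's extra info, falling back to a generic webdl/webrip split (see get_webdl_type and load_webdl_types in the properFinder diff below). A minimal standalone sketch of that idea, using only the default patterns this commit ships (Amazon, Netflix, Hulu); the function name classify_webdl is illustrative, while the matching rules mirror the diff:

import re

# default (dname, regex) pairs, as defined in load_webdl_types() below
DEFAULT_WEBDL_TYPES = [('Amazon', r'AMZN|AMAZON'), ('Netflix', r'NETFLIX|NF'), ('Hulu', r'HULU')]

def classify_webdl(extra_info_no_name, rel_name):
    # a named source wins (e.g. 1080p.AMZN.WEB-DL... -> 'Amazon')
    for dname, pattern in DEFAULT_WEBDL_TYPES:
        if re.search(r'\b%s\b' % pattern, extra_info_no_name, flags=re.I):
            return dname
    # otherwise split generic web sources: explicit web-dl vs everything else (webrip)
    return ('webdl', 'webrip')[None is re.search(r'\bweb.?dl\b', rel_name, flags=re.I)]

# classify_webdl('1080p.WEB.x264', 'Show.S04E10.1080p.WEB.x264-GROUP')            -> 'webrip'
# classify_webdl('720p.WEB-DL.DD5.1.H.264', 'Show.720p.WEB-DL.DD5.1.H.264-GROUP') -> 'webdl'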

View file

@ -643,7 +643,7 @@ inc_bottom.tmpl
opacity:0.4;filter:alpha(opacity=40);
float:none;
display:inline-block;
-margin:0 0 -2px 0;
+margin:0 0 -1px 2px;
height:12px;
width:14px
}
@ -653,11 +653,11 @@ inc_bottom.tmpl
}
.footer .icon-glyph.timeleft,
.footer .icon-glyph.time:hover{
-background-position:-48px -25px
+background-position:-49px -25px
}
.footer .icon-glyph.time,
.footer .icon-glyph.timeleft:hover{
-background-position:-192px -121px
+background-position:-193px -121px
}
/* =======================================================================

View file

@ -74,18 +74,6 @@
</span>
</label>
</div>
<div class="field-pair">
<label for="check_propers_interval">
<span class="component-title">Check propers every:</span>
<span class="component-desc">
<select id="check_propers_interval" name="check_propers_interval" class="form-control input-sm">
#for $curKey, $curText, $void in $propers_intervals:
<option value="$curKey"#echo ('', $html_selected)[$sickbeard.CHECK_PROPERS_INTERVAL == $curKey]#>$curText</option>
#end for
</select>
</span>
</label>
</div>
</div>
<div class="field-pair">

View file

@ -69,13 +69,13 @@ if min_output:
% (localRoot, str(ep_snatched))
)[0 < ep_snatched]
%>&nbsp;/&nbsp;<span class="footerhighlight">$ep_total</span> episodes downloaded $ep_percentage
-#for i, event in enumerate($MainHandler.getFooterTime(ajax_layout=False))
+#for i, event in enumerate($MainHandler.getFooterTime(change_layout=False, json_dump=False))
#for k, v in event.items()
#set info = re.findall('(.*)_(timeleft|time)', k)[0]
#if not i
<br><i class="icon-glyph layout $info[1]" title="Change time layout"></i>
<br>next connect <i class="icon-glyph layout $info[1]" title="Change time layout"></i> for...
#end if
-| $info[0].replace('-', ' '): <span class="footerhighlight $info[0]">$v</span>
+<span id="next-connect-$info[0]">| $info[0].replace('-', ' '): <span class="footerhighlight $info[0]">$v</span></span>
#end for
#end for
#if diskfree
@ -106,18 +106,31 @@ if min_output:
<script>
var footerTimeUrl = '$localRoot/getFooterTime';
#raw
-$(function(){
-$('.footer').find('.layout').click(function(){
-$.getJSON(footerTimeUrl, function(data){
-var info, footerIcon$ = $('.footer').find('.icon-glyph.layout');
-$.each(data, function(i, eventItems){
-$.each(eventItems, function(k, v){
-info = k.match(/(.*)_(timeleft|time)/);
-$('.footer').find('.' + info[1]).html(v);
-footerIcon$.removeClass('time').removeClass('timeleft').addClass(info[2])
-});
+function getFooterTime(params){
+params = /undefined/.test(params) && {} || params;
+$.getJSON(footerTimeUrl, params, function(data){
+var info, footerIcon$ = $('.footer').find('.icon-glyph.layout'), enabledPropers = !1;
+$.each(data, function(i, eventItems){
+$.each(eventItems, function(k, v){
+info = k.match(/(.*)_(timeleft|time)/);
+$('.footer').find('.' + info[1]).html(v);
+footerIcon$.removeClass('time').removeClass('timeleft').addClass(info[2]);
+enabledPropers |= /propers/.test(info[1]); // enable only if key is found in response
+});
+});
+var propers$ = $('#next-connect-propers');
+if(enabledPropers){
+propers$.show();
+} else {
+propers$.hide();
+}
+});
+}
+$(function(){
+$('.footer').find('.layout').click(function(){
+getFooterTime();
+});
+});
#end raw

View file

@ -118,7 +118,7 @@
#if $fail['http']['count']
#set $title=$fail['http']['code']
#end if
<td>#if $fail['http']['count']#<span title="#if $child#$title#else#Expand for fail codes#end if#">$fail['http']['count']</span>#else#$blank#end if# / #echo $fail['timeout'].get('count', 0) or $blank#</td>
<td>#if $fail['http']['count']#<span title="#if $child or not $fail['multirow']#$title#else#Expand for fail codes#end if#">$fail['http']['count']</span>#else#$blank#end if# / #echo $fail['timeout'].get('count', 0) or $blank#</td>
<td>#echo ($fail['connection'].get('count', 0) + $fail['connection_timeout'].get('count', 0)) or $blank#</td>
<td>#echo $fail['nodata'].get('count', 0) or $blank#</td>
<td>#echo $fail['other'].get('count', 0) or $blank#</td>
@ -148,7 +148,26 @@
<div id="queue-proper" class="section">
-Proper: <i>$queue_length['proper'] item$sickbeard.helpers.maybe_plural($queue_length['proper'])</i>
+Proper: <i>$len($queue_length['proper']) item$sickbeard.helpers.maybe_plural($queue_length['proper'])</i>
+#if $queue_length['proper']
+<input type="button" class="shows-more btn" id="proper-btn-more" value="Expand" #if not $queue_length['proper']# style="display:none" #end if#><input type="button" class="shows-less btn" id="proper-btn-less" value="Collapse" style="display:none"><br>
+<table class="sickbeardTable manageTable" cellspacing="1" border="0" cellpadding="0" style="display:none">
+<thead></thead>
+<tbody>
+#set $row = 0
+#for $cur_item in $queue_length['proper']:
+#if $cur_item['recent']:
+#set $search_type = 'Recent'
+#else
+#set $search_type = 'Scheduled'
+#end if
+<tr class="#echo ('odd', 'even')[$row % 2]##set $row+=1#">
+<td style="width:20%;text-align:center;color:white">$search_type</td>
+</tr>
+#end for
+</tbody>
+</table>
+#end if
</div>

View file

@ -262,6 +262,11 @@ function config_success(response) {
$(this).show();
});
$('#email_show').trigger('notify');
// update footer only on the config page for the propers option
if('saveSearch' == $('#configForm').attr('action')){
getFooterTime({'change_layout': 0});
}
}
function fetch_pullrequests() {

View file

@ -37,7 +37,7 @@ sys.path.insert(1, os.path.abspath('../lib'))
from sickbeard import helpers, encodingKludge as ek
from sickbeard import db, image_cache, logger, naming, metadata, providers, scene_exceptions, scene_numbering, \
scheduler, auto_post_processer, search_queue, search_propers, search_recent, search_backlog, \
-show_queue, show_updater, subtitles, traktChecker, version_checker, indexermapper, classes
+show_queue, show_updater, subtitles, traktChecker, version_checker, indexermapper, classes, properFinder
from sickbeard.config import CheckSection, check_setting_int, check_setting_str, ConfigMigrator, minimax
from sickbeard.common import SD, SKIPPED
from sickbeard.databases import mainDB, cache_db, failed_db
@ -210,8 +210,8 @@ USENET_RETENTION = None
TORRENT_METHOD = None
TORRENT_DIR = None
DOWNLOAD_PROPERS = False
-CHECK_PROPERS_INTERVAL = None
PROPERS_WEBDL_ONEGRP = True
+WEBDL_TYPES = []
ALLOW_HIGH_PRIORITY = False
NEWZNAB_DATA = ''
@ -595,7 +595,7 @@ def initialize(console_logging=True):
global BRANCH, CUR_COMMIT_BRANCH, GIT_REMOTE, CUR_COMMIT_HASH, GIT_PATH, CPU_PRESET, ANON_REDIRECT, \
ENCRYPTION_VERSION, PROXY_SETTING, PROXY_INDEXERS, FILE_LOGGING_PRESET
# Search Settings/Episode
-global DOWNLOAD_PROPERS, PROPERS_WEBDL_ONEGRP, CHECK_PROPERS_INTERVAL, RECENTSEARCH_FREQUENCY, \
+global DOWNLOAD_PROPERS, PROPERS_WEBDL_ONEGRP, WEBDL_TYPES, RECENTSEARCH_FREQUENCY, \
BACKLOG_DAYS, BACKLOG_NOFULL, BACKLOG_FREQUENCY, USENET_RETENTION, IGNORE_WORDS, REQUIRE_WORDS, \
ALLOW_HIGH_PRIORITY, SEARCH_UNAIRED, UNAIRED_RECENT_SEARCH_ONLY
# Search Settings/NZB search
@ -846,9 +846,6 @@ def initialize(console_logging=True):
DOWNLOAD_PROPERS = bool(check_setting_int(CFG, 'General', 'download_propers', 1))
PROPERS_WEBDL_ONEGRP = bool(check_setting_int(CFG, 'General', 'propers_webdl_onegrp', 1))
-CHECK_PROPERS_INTERVAL = check_setting_str(CFG, 'General', 'check_propers_interval', '')
-if CHECK_PROPERS_INTERVAL not in ('15m', '45m', '90m', '4h', 'daily'):
-CHECK_PROPERS_INTERVAL = 'daily'
ALLOW_HIGH_PRIORITY = bool(check_setting_int(CFG, 'General', 'allow_high_priority', 1))
@ -1375,19 +1372,17 @@ def initialize(console_logging=True):
prevent_cycle_run=searchQueueScheduler.action.is_standard_backlog_in_progress)
propers_searcher = search_propers.ProperSearcher()
-item = [(k, n, v) for (k, n, v) in propers_searcher.search_intervals if k == CHECK_PROPERS_INTERVAL]
-if item:
-update_interval = datetime.timedelta(minutes=item[0][2])
-run_at = None
+last_proper_search = datetime.datetime.fromtimestamp(properFinder.get_last_proper_search())
+time_diff = datetime.timedelta(days=1) - (datetime.datetime.now() - last_proper_search)
+if time_diff < datetime.timedelta(seconds=0):
+properdelay = 20
else:
-update_interval = datetime.timedelta(hours=1)
-run_at = datetime.time(hour=1) # 1 AM
+properdelay = helpers.tryInt((time_diff.total_seconds() / 60) + 5, 20)
properFinderScheduler = scheduler.Scheduler(
propers_searcher,
-cycleTime=update_interval,
-run_delay=update_interval,
-start_time=run_at,
+cycleTime=datetime.timedelta(days=1),
+run_delay=datetime.timedelta(minutes=properdelay),
threadName='FINDPROPERS',
prevent_cycle_run=searchQueueScheduler.action.is_propersearch_in_progress)
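
In other words, the proper finder now always cycles daily, and its startup delay is derived from the persisted last-search timestamp. A worked sketch of just that delay rule (plain datetime math; helpers.tryInt is approximated here by int() with the same 20-minute fallback):

import datetime

def proper_finder_delay_minutes(last_search, now=None):
    # mirror of the initialize() logic above: aim for 24h after the last
    # proper search; if that moment has already passed, run 20 min after startup
    now = now or datetime.datetime.now()
    time_diff = datetime.timedelta(days=1) - (now - last_search)
    if time_diff < datetime.timedelta(seconds=0):
        return 20
    # otherwise wait out the rest of the 24h window, plus a 5 minute pad
    return int(time_diff.total_seconds() / 60 + 5)

# last search 23h ago -> ~65 (an hour away, padded); last search 2 days ago -> 20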
@ -1579,7 +1574,6 @@ def save_config():
new_config['General']['update_frequency'] = int(UPDATE_FREQUENCY)
new_config['General']['download_propers'] = int(DOWNLOAD_PROPERS)
new_config['General']['propers_webdl_onegrp'] = int(PROPERS_WEBDL_ONEGRP)
-new_config['General']['check_propers_interval'] = CHECK_PROPERS_INTERVAL
new_config['General']['allow_high_priority'] = int(ALLOW_HIGH_PRIORITY)
new_config['General']['recentsearch_startup'] = int(RECENTSEARCH_STARTUP)
new_config['General']['backlog_nofull'] = int(BACKLOG_NOFULL)

View file

@ -546,6 +546,24 @@ class neededQualities(object):
if isinstance(v, bool) and True is v:
self.need_sd = self.need_hd = self.need_uhd = self.need_webdl = True
def all_show_qualities_needed(self, show):
from sickbeard.tv import TVShow
if isinstance(show, TVShow):
init, upgrade = Quality.splitQuality(show.quality)
all_qual = set(init + upgrade)
need_sd = need_hd = need_uhd = need_webdl = False
for wanted_qualities in all_qual:
if not need_sd and wanted_qualities <= neededQualities.max_sd:
need_sd = True
if not need_hd and wanted_qualities in neededQualities.hd_qualities:
need_hd = True
if not need_webdl and wanted_qualities in neededQualities.webdl_qualities:
need_webdl = True
if not need_uhd and wanted_qualities > neededQualities.max_hd:
need_uhd = True
return self.need_sd == need_sd and self.need_hd == need_hd and self.need_webdl == need_webdl and \
self.need_uhd == need_uhd
def check_needed_types(self, show):
if getattr(show, 'is_anime', False):
self.need_anime = True
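
The new all_show_qualities_needed method lets callers stop scanning a show's episodes once the show's configured qualities can no longer change the needed sd/hd/webdl/uhd flags. A toy illustration of that flag-derivation idea — the numeric buckets below are hypothetical stand-ins, not SickGear's real Quality constants:

# hypothetical quality buckets standing in for the Quality/neededQualities constants
MAX_SD, MAX_HD = 10, 20
HD_QUALITIES = {11, 12, 13}
WEBDL_QUALITIES = {12, 22}

def needed_flags(qualities):
    # the same four booleans all_show_qualities_needed() derives from a show
    return dict(sd=any(q <= MAX_SD for q in qualities),
                hd=any(q in HD_QUALITIES for q in qualities),
                webdl=any(q in WEBDL_QUALITIES for q in qualities),
                uhd=any(q > MAX_HD for q in qualities))

# a show wanting {11, 12} contributes hd+webdl only; if those flags already
# match what is needed, scanning more of its episodes cannot add anything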

View file

@ -27,7 +27,7 @@ from sickbeard import encodingKludge as ek
from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException
MIN_DB_VERSION = 9 # oldest db version we support migrating from
-MAX_DB_VERSION = 20006
+MAX_DB_VERSION = 20008
class MainSanityCheck(db.DBSanityCheck):
@ -1258,3 +1258,23 @@ class AddFlagTable(db.SchemaUpgrade):
self.setDBVersion(20006)
return self.checkDBVersion()
# 20006 -> 20007
class DBIncreaseTo20007(db.SchemaUpgrade):
def execute(self):
logger.log(u'Bumping database version')
self.setDBVersion(20007)
return self.checkDBVersion()
# 20007 -> 20008
class AddWebdlTypesTable(db.SchemaUpgrade):
def execute(self):
db.backup_database('sickbeard.db', self.checkDBVersion())
self.connection.action('CREATE TABLE webdl_types (dname TEXT NOT NULL , regex TEXT NOT NULL )')
self.setDBVersion(20008)
return self.checkDBVersion()

View file

@ -512,7 +512,9 @@ def MigrationCode(myDB):
20002: sickbeard.mainDB.AddTvShowTags,
20003: sickbeard.mainDB.ChangeMapIndexer,
20004: sickbeard.mainDB.AddShowNotFoundCounter,
-20005: sickbeard.mainDB.AddFlagTable
+20005: sickbeard.mainDB.AddFlagTable,
+20006: sickbeard.mainDB.DBIncreaseTo20007,
+20007: sickbeard.mainDB.AddWebdlTypesTable,
# 20002: sickbeard.mainDB.AddCoolSickGearFeature3,
}
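
Each key in this map is a current schema version and each execute() bumps the version, so a database at 20005 now chains AddFlagTable -> DBIncreaseTo20007 -> AddWebdlTypesTable to reach 20008. A minimal sketch of how such a version-keyed upgrade map is walked (illustrative only; the real MigrationCode loop differs in details such as backups and error handling):

def run_migrations(db_version, schema):
    # apply upgrades until no entry exists for the current version
    while db_version in schema:
        upgrade_class = schema[db_version]
        db_version = upgrade_class().execute()  # execute() returns the bumped version
    return db_version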

View file

@ -226,7 +226,10 @@ def update_network_dict():
try:
for line in url_data.splitlines():
-(key, val) = line.decode('utf-8').strip().rsplit(u':', 1)
+try:
+(key, val) = line.decode('utf-8').strip().rsplit(u':', 1)
+except (StandardError, Exception):
+continue
if key is None or val is None:
continue
d[key] = val

View file

@ -27,27 +27,28 @@ import sickbeard
from sickbeard import db, exceptions, helpers, history, logger, search, show_name_helpers
from sickbeard import encodingKludge as ek
-from sickbeard.common import DOWNLOADED, SNATCHED_ANY, SNATCHED_PROPER, Quality, ARCHIVED, FAILED
+from sickbeard.common import DOWNLOADED, SNATCHED_ANY, SNATCHED_PROPER, Quality, ARCHIVED, FAILED, neededQualities
from sickbeard.exceptions import ex, MultipleShowObjectsException
from sickbeard import failed_history
from sickbeard.history import dateFormat
from sickbeard.sbdatetime import sbdatetime
from name_parser.parser import NameParser, InvalidNameException, InvalidShowException
-def search_propers():
+def search_propers(proper_list=None):
if not sickbeard.DOWNLOAD_PROPERS:
return
-logger.log(u'Beginning search for new propers')
+logger.log(('Checking propers from recent search', 'Beginning search for new propers')[None is proper_list])
age_shows, age_anime = sickbeard.BACKLOG_DAYS + 2, 14
aired_since_shows = datetime.datetime.today() - datetime.timedelta(days=age_shows)
aired_since_anime = datetime.datetime.today() - datetime.timedelta(days=age_anime)
recent_shows, recent_anime = _recent_history(aired_since_shows, aired_since_anime)
if recent_shows or recent_anime:
-propers = _get_proper_list(aired_since_shows, recent_shows, recent_anime)
+propers = _get_proper_list(aired_since_shows, recent_shows, recent_anime, proper_list=proper_list)
if propers:
_download_propers(propers)
@ -55,52 +56,59 @@ def search_propers():
logger.log(u'No downloads or snatches found for the last %s%s days to use for a propers search' %
(age_shows, ('', ' (%s for anime)' % age_anime)[helpers.has_anime()]))
-_set_last_proper_search(datetime.datetime.today().toordinal())
-run_at = ''
-proper_sch = sickbeard.properFinderScheduler
-if None is proper_sch.start_time:
-run_in = proper_sch.lastRun + proper_sch.cycleTime - datetime.datetime.now()
-run_at = u', next check '
-if datetime.timedelta() > run_in:
-run_at += u'imminent'
-else:
-hours, remainder = divmod(run_in.seconds, 3600)
-minutes, seconds = divmod(remainder, 60)
-run_at += u'in approx. ' + ('%dh, %dm' % (hours, minutes) if 0 < hours else '%dm, %ds' % (minutes, seconds))
+if None is proper_list:
+_set_last_proper_search(datetime.datetime.now())
-logger.log(u'Completed the search for new propers%s' % run_at)
+proper_sch = sickbeard.properFinderScheduler
+if None is proper_sch.start_time:
+run_in = proper_sch.lastRun + proper_sch.cycleTime - datetime.datetime.now()
+run_at = u', next check '
+if datetime.timedelta() > run_in:
+run_at += u'imminent'
+else:
+hours, remainder = divmod(run_in.seconds, 3600)
+minutes, seconds = divmod(remainder, 60)
+run_at += u'in approx. ' + ('%dh, %dm' % (hours, minutes) if 0 < hours else
+'%dm, %ds' % (minutes, seconds))
+logger.log(u'Completed search for new propers%s' % run_at)
+else:
+logger.log(u'Completed checking propers from recent search')
-def get_old_proper_level(showObj, indexer, indexerid, season, episodes, old_status, new_quality,
+def get_old_proper_level(show_obj, indexer, indexerid, season, episodes, old_status, new_quality,
extra_no_name, version, is_anime=False):
level = 0
is_internal = False
codec = ''
+rel_name = None
if old_status not in SNATCHED_ANY:
level = Quality.get_proper_level(extra_no_name, version, is_anime)
-elif showObj:
-myDB = db.DBConnection()
-np = NameParser(False, showObj=showObj)
+elif show_obj:
+my_db = db.DBConnection()
+np = NameParser(False, showObj=show_obj)
for episode in episodes:
-result = myDB.select('SELECT resource FROM history WHERE showid = ? AND season = ? AND episode = ? AND '
-'(' + ' OR '.join("action LIKE '%%%02d'" % x for x in SNATCHED_ANY) + ') '
-'ORDER BY date DESC LIMIT 1',
-[indexerid, season, episode])
+result = my_db.select('SELECT resource FROM history WHERE showid = ? AND season = ? AND episode = ? AND '
+'(' + ' OR '.join("action LIKE '%%%02d'" % x for x in SNATCHED_ANY) + ') '
+'ORDER BY date DESC LIMIT 1',
+[indexerid, season, episode])
if not result or not isinstance(result[0]['resource'], basestring) or not result[0]['resource']:
continue
-nq = Quality.sceneQuality(result[0]['resource'], showObj.is_anime)
+nq = Quality.sceneQuality(result[0]['resource'], show_obj.is_anime)
if nq != new_quality:
continue
try:
p = np.parse(result[0]['resource'])
except (StandardError, Exception):
continue
-level = Quality.get_proper_level(p.extra_info_no_name(), p.version, showObj.is_anime)
+level = Quality.get_proper_level(p.extra_info_no_name(), p.version, show_obj.is_anime)
+extra_no_name = p.extra_info_no_name()
+rel_name = result[0]['resource']
is_internal = p.extra_info_no_name() and re.search(r'\binternal\b', p.extra_info_no_name(), flags=re.I)
codec = _get_codec(p.extra_info_no_name())
break
-return level, is_internal, codec
+return level, is_internal, codec, extra_no_name, rel_name
def _get_codec(extra_info_no_name):
@ -110,12 +118,62 @@ def _get_codec(extra_info_no_name):
return '264'
elif re.search(r'\bxvid\b', extra_info_no_name, flags=re.I):
return 'xvid'
-elif re.search(r'\b[xh]265|hevc\b', extra_info_no_name, flags=re.I):
+elif re.search(r'\b[xh]\W?265|hevc\b', extra_info_no_name, flags=re.I):
return 'hevc'
return ''
-def _get_proper_list(aired_since_shows, recent_shows, recent_anime):
+def get_webdl_type(extra_info_no_name, rel_name):
+if not sickbeard.WEBDL_TYPES:
+load_webdl_types()
+for t in sickbeard.WEBDL_TYPES:
+try:
+if re.search(r'\b%s\b' % t[1], extra_info_no_name, flags=re.I):
+return t[0]
+except (StandardError, Exception):
+continue
+return ('webdl', 'webrip')[None is re.search(r'\bweb.?dl\b', rel_name, flags=re.I)]
+def load_webdl_types():
+new_types = []
+default_types = [('Amazon', r'AMZN|AMAZON'), ('Netflix', r'NETFLIX|NF'), ('Hulu', r'HULU')]
+url = 'https://raw.githubusercontent.com/SickGear/sickgear.extdata/master/SickGear/webdl_types.txt'
+url_data = helpers.getURL(url)
+if url_data:
+try:
+for line in url_data.splitlines():
+try:
+(key, val) = line.decode('utf-8').strip().split(u'::', 1)
+except (StandardError, Exception):
+continue
+if key is None or val is None:
+continue
+new_types.append((key, val))
+except (IOError, OSError):
+pass
+my_db = db.DBConnection()
+sql_results = my_db.select('SELECT * FROM webdl_types')
+old_types = [(r['dname'], r['regex']) for r in sql_results]
+cl = []
+for nt in new_types:
+if nt not in old_types:
+cl.append(['REPLACE INTO webdl_types (dname, regex) VALUES (?,?)', [nt[0], nt[1]]])
+for ot in old_types:
+if ot not in new_types:
+cl.append(['DELETE FROM webdl_types WHERE dname = ? AND regex = ?', [ot[0], ot[1]]])
+if cl:
+my_db.mass_action(cl)
+sickbeard.WEBDL_TYPES = new_types + default_types
+def _get_proper_list(aired_since_shows, recent_shows, recent_anime, proper_list=None):
propers = {}
# for each provider get a list of the
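
Note the precedence in the two new functions above: load_webdl_types assembles WEBDL_TYPES as the remotely fetched new_types followed by the built-in default_types, and get_webdl_type returns on the first pattern hit, so a remote definition can shadow a shipped one. A usage sketch, assuming sickbeard.WEBDL_TYPES is already populated (inputs mirror tests/search_tests.py at the end of this commit):

# named source beats the generic split
get_webdl_type('1080p.AMZN.WEB-DL.DD5.1.H.264', 'The.Show.Name.1080p.AMZN.WEB-DL.DD5.1.H.264-GROUP')  # 'Amazon'
# no named match: explicit web-dl vs webrip fallback
get_webdl_type('720p.WEB-DL.DD5.1.H.264', 'The.Show.Name.720p.WEB-DL.DD5.1.H.264-GROUP')  # 'webdl'
get_webdl_type('1080p.WEB.x264', 'The.Show.Name.S04E10.1080p.WEB.x264-GROUP')  # 'webrip'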
@ -124,22 +182,28 @@ def _get_proper_list(aired_since_shows, recent_shows, recent_anime):
for cur_provider in providers:
if not recent_anime and cur_provider.anime_only:
continue
-threading.currentThread().name = orig_thread_name + ' :: [' + cur_provider.name + ']'
-logger.log(u'Searching for new PROPER releases')
+if None is not proper_list:
+found_propers = proper_list.get(cur_provider.get_id(), [])
+if not found_propers:
+continue
+else:
+threading.currentThread().name = orig_thread_name + ' :: [' + cur_provider.name + ']'
-try:
-found_propers = cur_provider.find_propers(search_date=aired_since_shows, shows=recent_shows,
-anime=recent_anime)
-except exceptions.AuthException as e:
-logger.log(u'Authentication error: ' + ex(e), logger.ERROR)
-continue
-except Exception as e:
-logger.log(u'Error while searching ' + cur_provider.name + ', skipping: ' + ex(e), logger.ERROR)
-logger.log(traceback.format_exc(), logger.ERROR)
-continue
-finally:
-threading.currentThread().name = orig_thread_name
+logger.log(u'Searching for new PROPER releases')
+try:
+found_propers = cur_provider.find_propers(search_date=aired_since_shows, shows=recent_shows,
+anime=recent_anime)
+except exceptions.AuthException as e:
+logger.log(u'Authentication error: ' + ex(e), logger.ERROR)
+continue
+except Exception as e:
+logger.log(u'Error while searching ' + cur_provider.name + ', skipping: ' + ex(e), logger.ERROR)
+logger.log(traceback.format_exc(), logger.ERROR)
+continue
+finally:
+threading.currentThread().name = orig_thread_name
# if they haven't been added by a different provider then add the proper to the list
count = 0
@ -162,7 +226,7 @@ def _get_proper_list(aired_since_shows, recent_shows, recent_anime):
parse_result.is_anime,
check_is_repack=True)
x.is_internal = parse_result.extra_info_no_name() and \
-re.search(r'\binternal\b', parse_result.extra_info_no_name(), flags=re.I)
+re.search(r'\binternal\b', parse_result.extra_info_no_name(), flags=re.I)
x.codec = _get_codec(parse_result.extra_info_no_name())
propers[name] = x
count += 1
@ -255,11 +319,12 @@ def _get_proper_list(aired_since_shows, recent_shows, recent_anime):
except (StandardError, Exception):
extra_info = None
-old_proper_level, old_is_internal, old_codec = get_old_proper_level(parse_result.show, cur_proper.indexer,
-cur_proper.indexerid, cur_proper.season,
-parse_result.episode_numbers, old_status,
-cur_proper.quality, extra_info,
-cur_proper.version, cur_proper.is_anime)
+old_proper_level, old_is_internal, old_codec, old_extra_no_name, old_name = \
+get_old_proper_level(parse_result.show, cur_proper.indexer, cur_proper.indexerid, cur_proper.season,
+parse_result.episode_numbers, old_status, cur_proper.quality, extra_info,
+cur_proper.version, cur_proper.is_anime)
+old_name = (old_name, sql_results[0]['release_name'])[old_name in ('', None)]
if cur_proper.proper_level < old_proper_level:
continue
elif cur_proper.proper_level == old_proper_level:
@ -273,11 +338,20 @@ def _get_proper_list(aired_since_shows, recent_shows, recent_anime):
log_same_grp = 'Skipping proper from release group: [%s], does not match existing release group: [%s] for [%s]'\
% (cur_proper.release_group, old_release_group, cur_proper.name)
+is_web = (old_quality in (Quality.HDWEBDL, Quality.FULLHDWEBDL, Quality.UHD4KWEB) or
+(old_quality == Quality.SDTV and re.search(r'\Wweb.?(dl|rip|.[hx]26[45])\W',
+str(sql_results[0]['release_name']), re.I)))
+if is_web:
+old_webdl_type = get_webdl_type(old_extra_no_name, old_name)
+new_webdl_type = get_webdl_type(cur_proper.extra_info_no_name, cur_proper.name)
+if old_webdl_type != new_webdl_type:
+logger.log('Skipping proper webdl source: [%s], does not match existing webdl source: [%s] for [%s]'
+% (old_webdl_type, new_webdl_type, cur_proper.name), logger.DEBUG)
+continue
# for web-dls, prevent propers from different groups
-if sickbeard.PROPERS_WEBDL_ONEGRP and \
-(old_quality in (Quality.HDWEBDL, Quality.FULLHDWEBDL, Quality.UHD4KWEB) or
-(old_quality == Quality.SDTV and re.search(r'\Wweb.?(dl|rip|.[hx]26[45])\W', str(sql_results[0]['release_name']), re.I))) and \
-cur_proper.release_group != old_release_group:
+if sickbeard.PROPERS_WEBDL_ONEGRP and is_web and cur_proper.release_group != old_release_group:
logger.log(log_same_grp, logger.DEBUG)
continue
@ -375,6 +449,46 @@ def _download_propers(proper_list):
search.snatch_episode(result, SNATCHED_PROPER)
def get_needed_qualites(needed=None):
if not isinstance(needed, neededQualities):
needed = neededQualities()
if not sickbeard.DOWNLOAD_PROPERS or needed.all_needed:
return needed
age_shows, age_anime = sickbeard.BACKLOG_DAYS + 2, 14
aired_since_shows = datetime.datetime.today() - datetime.timedelta(days=age_shows)
aired_since_anime = datetime.datetime.today() - datetime.timedelta(days=age_anime)
my_db = db.DBConnection()
sql_results = my_db.select(
'SELECT DISTINCT s.indexer, s.indexer_id, e.season, e.episode FROM history as h' +
' INNER JOIN tv_episodes AS e ON (h.showid == e.showid AND h.season == e.season AND h.episode == e.episode)' +
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
' WHERE h.date >= %s' % min(aired_since_shows, aired_since_anime).strftime(dateFormat) +
' AND (%s)' % ' OR '.join(['h.action LIKE "%%%02d"' % x for x in SNATCHED_ANY + [DOWNLOADED, FAILED]])
)
for sql_episode in sql_results:
if needed.all_needed:
break
try:
show = helpers.find_show_by_id(
sickbeard.showList, {int(sql_episode['indexer']): int(sql_episode['indexer_id'])})
except MultipleShowObjectsException:
continue
if show:
needed.check_needed_types(show)
if needed.all_show_qualities_needed(show) or needed.all_qualities_needed:
continue
ep_obj = show.getEpisode(season=sql_episode['season'], episode=sql_episode['episode'])
if ep_obj:
ep_status, ep_quality = Quality.splitCompositeStatus(ep_obj.status)
if ep_status in SNATCHED_ANY + [DOWNLOADED, ARCHIVED]:
needed.check_needed_qualities([ep_quality])
return needed
def _recent_history(aired_since_shows, aired_since_anime):
recent_shows, recent_anime = [], []
@ -418,19 +532,23 @@ def _set_last_proper_search(when):
if 0 == len(sql_results):
my_db.action('INSERT INTO info (last_backlog, last_indexer, last_proper_search) VALUES (?,?,?)',
-[0, 0, str(when)])
+[0, 0, sbdatetime.totimestamp(when)])
else:
-my_db.action('UPDATE info SET last_proper_search=%s' % when)
+my_db.action('UPDATE info SET last_proper_search=%s' % sbdatetime.totimestamp(when))
-def _get_last_proper_search():
+def next_proper_timeleft():
+return sickbeard.properFinderScheduler.timeLeft()
+def get_last_proper_search():
my_db = db.DBConnection()
sql_results = my_db.select('SELECT * FROM info')
try:
-last_proper_search = datetime.date.fromordinal(int(sql_results[0]['last_proper_search']))
+last_proper_search = int(sql_results[0]['last_proper_search'])
except (StandardError, Exception):
-return datetime.date.fromordinal(1)
+return 1
return last_proper_search

View file

@ -26,8 +26,6 @@ class ProperSearcher:
def __init__(self):
self.lock = threading.Lock()
self.amActive = False
-self.search_intervals = [('daily', '24 hours', 24 * 60), ('4h', '4 hours', 4 * 60),
-('90m', '90 mins', 90), ('45m', '45 mins', 45), ('15m', '15 mins', 15)]
@staticmethod
def check_paused():

View file

@ -21,11 +21,14 @@ from __future__ import with_statement
import traceback
import threading
import datetime
+import re
import sickbeard
from sickbeard import db, logger, common, exceptions, helpers, network_timezones, generic_queue, search, \
failed_history, history, ui, properFinder
from sickbeard.search import wanted_episodes, get_aired_in_season, set_wanted_aired
+from sickbeard.classes import Proper
+from sickbeard.indexers.indexer_config import INDEXER_TVDB
search_queue_lock = threading.Lock()
@ -109,7 +112,11 @@ class SearchQueue(generic_queue.GenericQueue):
return self._is_in_progress(RecentSearchQueueItem)
def is_propersearch_in_progress(self):
-return self._is_in_progress(ProperSearchQueueItem)
+with self.lock:
+for cur_item in self.queue + [self.currentItem]:
+if isinstance(cur_item, ProperSearchQueueItem) and None is cur_item.propers:
+return True
+return False
def is_standard_backlog_in_progress(self):
with self.lock:
@ -141,25 +148,25 @@ class SearchQueue(generic_queue.GenericQueue):
return message
def queue_length(self):
-length = {'backlog': [], 'recent': 0, 'manual': [], 'failed': [], 'proper': 0}
+length = {'backlog': [], 'recent': 0, 'manual': [], 'failed': [], 'proper': []}
with self.lock:
for cur_item in [self.currentItem] + self.queue:
if isinstance(cur_item, RecentSearchQueueItem):
length['recent'] += 1
elif isinstance(cur_item, BacklogQueueItem):
-length['backlog'].append({'indexerid': cur_item.show.indexerid, 'indexer': cur_item.show.indexer,
-'name': cur_item.show.name, 'segment': cur_item.segment,
-'standard_backlog': cur_item.standard_backlog,
-'limited_backlog': cur_item.limited_backlog, 'forced': cur_item.forced,
-'torrent_only': cur_item.torrent_only})
+length['backlog'] += [dict(indexerid=cur_item.show.indexerid, indexer=cur_item.show.indexer,
+name=cur_item.show.name, segment=cur_item.segment,
+standard_backlog=cur_item.standard_backlog,
+limited_backlog=cur_item.limited_backlog, forced=cur_item.forced,
+torrent_only=cur_item.torrent_only)]
elif isinstance(cur_item, ProperSearchQueueItem):
-length['proper'] += 1
+length['proper'] += [dict(recent=None is not cur_item.propers)]
elif isinstance(cur_item, ManualSearchQueueItem):
-length['manual'].append({'indexerid': cur_item.show.indexerid, 'indexer': cur_item.show.indexer,
-'name': cur_item.show.name, 'segment': cur_item.segment})
+length['manual'] += [dict(indexerid=cur_item.show.indexerid, indexer=cur_item.show.indexer,
+name=cur_item.show.name, segment=cur_item.segment)]
elif isinstance(cur_item, FailedQueueItem):
-length['failed'].append({'indexerid': cur_item.show.indexerid, 'indexer': cur_item.show.indexer,
-'name': cur_item.show.name, 'segment': cur_item.segment})
+length['failed'] += [dict(indexerid=cur_item.show.indexerid, indexer=cur_item.show.indexer,
+name=cur_item.show.name, segment=cur_item.segment)]
return length
def add_item(self, item):
@ -210,7 +217,11 @@ class RecentSearchQueueItem(generic_queue.QueueItem):
self.episodes.extend(wanted_eps)
if sickbeard.DOWNLOAD_PROPERS:
properFinder.get_needed_qualites(needed)
self.update_providers(needed=needed)
self._check_for_propers(needed)
if not self.episodes:
logger.log(u'No search of cache for episodes required')
@ -244,6 +255,33 @@ class RecentSearchQueueItem(generic_queue.QueueItem):
finally:
self.finish()
@staticmethod
def _check_for_propers(needed):
if not sickbeard.DOWNLOAD_PROPERS:
return
propers = {}
my_db = db.DBConnection('cache.db')
sql_results = my_db.select('SELECT * FROM provider_cache')
re_p = (r'\brepack|proper|real\b', r'\brepack|proper|real|v[1-5]\b')[needed.need_anime]
proper_regex = re.compile(re_p, flags=re.I)
for s in sql_results:
if proper_regex.search(s['name']):
try:
show = helpers.find_show_by_id(sickbeard.showList, {INDEXER_TVDB: int(s['indexerid'])})
except (StandardError, Exception):
continue
if show:
propers.setdefault(s['provider'], []).append(
Proper(s['name'], s['url'], datetime.datetime.fromtimestamp(s['time']), show, parsed_show=show))
if propers:
logger.log('Found Proper/Repack/Real in recent search, sending data to properfinder')
propersearch_queue_item = sickbeard.search_queue.ProperSearchQueueItem(propers=propers)
sickbeard.searchQueueScheduler.action.add_item(propersearch_queue_item)
@staticmethod
def _change_missing_episodes():
if not network_timezones.network_dict:
@ -326,7 +364,8 @@ class RecentSearchQueueItem(generic_queue.QueueItem):
threads[-1].start()
if not len(providers):
-logger.log('No NZB/Torrent providers in Media Providers/Options are enabled to match recent episodes', logger.WARNING)
+logger.log('No NZB/Torrent providers in Media Providers/Options are enabled to match recent episodes',
+logger.WARNING)
if threads:
# wait for all threads to finish
@ -337,16 +376,17 @@ class RecentSearchQueueItem(generic_queue.QueueItem):
class ProperSearchQueueItem(generic_queue.QueueItem):
-def __init__(self):
+def __init__(self, propers=None):
generic_queue.QueueItem.__init__(self, 'Proper Search', PROPER_SEARCH)
-self.priority = generic_queue.QueuePriorities.HIGH
+self.priority = (generic_queue.QueuePriorities.VERYHIGH, generic_queue.QueuePriorities.HIGH)[None is propers]
+self.propers = propers
self.success = None
def run(self):
generic_queue.QueueItem.run(self)
try:
-properFinder.search_propers()
+properFinder.search_propers(self.propers)
finally:
self.finish()

View file

@ -20,7 +20,7 @@ import datetime
import traceback
import sickbeard
-from sickbeard import logger, exceptions, ui, db, network_timezones, failed_history
+from sickbeard import logger, exceptions, ui, db, network_timezones, failed_history, properFinder
from sickbeard.exceptions import ex
@ -43,6 +43,12 @@ class ShowUpdater:
logger.log('network timezone update error', logger.ERROR)
logger.log(traceback.format_exc(), logger.ERROR)
# refresh webdl types
try:
properFinder.load_webdl_types()
except (StandardError, Exception):
logger.log('error loading webdl_types', logger.DEBUG)
# update xem id lists
try:
sickbeard.scene_exceptions.get_xem_ids()

View file

@ -603,26 +603,35 @@ class MainHandler(WebHandler):
sickbeard.save_config()
@staticmethod
-def getFooterTime(ajax_layout=True, *args, **kwargs):
+def getFooterTime(change_layout=True, json_dump=True, *args, **kwargs):
now = datetime.datetime.now()
events = [
-('search-recent', sickbeard.recentSearchScheduler.timeLeft()),
-('search-backlog', sickbeard.backlogSearchScheduler.next_backlog_timeleft()),
+('recent', sickbeard.recentSearchScheduler.timeLeft),
+('backlog', sickbeard.backlogSearchScheduler.next_backlog_timeleft),
]
-if ajax_layout:
+if sickbeard.DOWNLOAD_PROPERS:
+events += [('propers', sickbeard.properFinder.next_proper_timeleft)]
+if change_layout not in (False, 0, '0', '', None):
sickbeard.FOOTER_TIME_LAYOUT += 1
if sickbeard.FOOTER_TIME_LAYOUT == 2: # 2 layouts = time + delta
sickbeard.FOOTER_TIME_LAYOUT = 0
sickbeard.save_config()
-if 0 == sickbeard.FOOTER_TIME_LAYOUT:
-next_event = [{k + '_time': sbdatetime.sbdatetime.sbftime(now + v, markup=True)} for (k, v) in events]
-else:
-next_event = [{k + '_timeleft': str(v).split('.')[0]} for (k, v) in events]
+next_event = []
+for k, v in events:
+try:
+t = v()
+except AttributeError:
+t = None
+if 0 == sickbeard.FOOTER_TIME_LAYOUT:
+next_event += [{k + '_time': t and sbdatetime.sbdatetime.sbftime(now + t, markup=True) or 'soon'}]
+else:
+next_event += [{k + '_timeleft': t and str(t).split('.')[0] or 'soon'}]
-if ajax_layout:
+if json_dump not in (False, 0, '0', '', None):
next_event = json.dumps(next_event)
return next_event
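
The payload is a list of one-key objects whose key names carry both the event and the active layout, which is exactly what the footer JavaScript splits apart with the (.*)_(timeleft|time) regex. An illustrative timeleft-layout response with propers enabled (values invented):

# shape of json.dumps(next_event) as consumed by getFooterTime() in inc_bottom.tmpl
[{"recent_timeleft": "0:12:34"},
 {"backlog_timeleft": "2 days, 3:00:00"},
 {"propers_timeleft": "11:22:33"}]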
@ -5024,7 +5033,6 @@ class ConfigSearch(Config):
for show in sickbeard.showList if show.rls_require_words and
show.rls_require_words.strip()]
t.using_rls_require_words.sort(lambda x, y: cmp(x[1], y[1]), reverse=False)
-t.propers_intervals = search_propers.ProperSearcher().search_intervals
t.using_regex = False
try:
from sickbeard.name_parser.parser import regex
@ -5038,7 +5046,7 @@ class ConfigSearch(Config):
nzbget_category=None, nzbget_priority=None, nzbget_host=None, nzbget_use_https=None,
backlog_days=None, backlog_frequency=None, search_unaired=None, unaired_recent_search_only=None,
recentsearch_frequency=None, nzb_method=None, torrent_method=None, usenet_retention=None,
-download_propers=None, propers_webdl_onegrp=None, check_propers_interval=None,
+download_propers=None, propers_webdl_onegrp=None,
allow_high_priority=None,
torrent_dir=None, torrent_username=None, torrent_password=None, torrent_host=None,
torrent_label=None, torrent_path=None, torrent_verify_cert=None,
@ -5077,24 +5085,6 @@ class ConfigSearch(Config):
config.change_DOWNLOAD_PROPERS(config.checkbox_to_value(download_propers))
sickbeard.PROPERS_WEBDL_ONEGRP = config.checkbox_to_value(propers_webdl_onegrp)
-if sickbeard.CHECK_PROPERS_INTERVAL != check_propers_interval:
-sickbeard.CHECK_PROPERS_INTERVAL = check_propers_interval
-if sickbeard.DOWNLOAD_PROPERS:
-proper_sch = sickbeard.properFinderScheduler
-item = [(k, n, v) for (k, n, v) in proper_sch.action.search_intervals if k == check_propers_interval]
-if item and None is proper_sch.start_time:
-interval = datetime.timedelta(minutes=item[0][2])
-run_in = proper_sch.lastRun + interval - datetime.datetime.now()
-proper_sch.cycleTime = interval
-run_at = 'imminent'
-if datetime.timedelta() < run_in:
-hours, remainder = divmod(run_in.seconds, 3600)
-minutes, seconds = divmod(remainder, 60)
-run_at = u'in approx. ' + ('%dh, %dm' % (hours, minutes) if 0 < hours else
-'%dm, %ds' % (minutes, seconds))
-logger.log(u'Change search PROPERS interval, next check %s' % run_at)
sickbeard.SEARCH_UNAIRED = bool(config.checkbox_to_value(search_unaired))
sickbeard.UNAIRED_RECENT_SEARCH_ONLY = bool(config.checkbox_to_value(unaired_recent_search_only, value_off=1, value_on=0))

tests/search_tests.py (new file, 41 lines)
View file

@ -0,0 +1,41 @@
import unittest
from sickbeard import properFinder
import sickbeard
import test_lib as test
sickbeard.SYS_ENCODING = 'UTF-8'
class ProperTests(test.SickbeardTestDBCase):
def check_webdl_type(self, cases):
for c in cases:
self.assertEqual(properFinder.get_webdl_type(*c[0]), c[1])
def check_get_codec(self, cases):
for c in cases:
self.assertEqual(properFinder._get_codec(c[0]), c[1])
def test_webdl_type(self):
self.check_webdl_type([
(('1080p.WEB.x264', 'The.Show.Name.S04E10.1080p.WEB.x264-GROUP'), 'webrip'),
(('720p.WEB-DL.DD5.1.H.264', 'The.Show.Name.720p.WEB-DL.DD5.1.H.264-GROUP'), 'webdl'),
(('1080p.AMZN.WEB-DL.DD5.1.H.264', 'The.Show.Name.1080p.AMZN.WEB-DL.DD5.1.H.264-GROUP'), 'Amazon'),
])
def test_get_codec(self):
self.check_get_codec([
('1080p.WEB.x264', '264'),
('720p.WEB.h264', '264'),
('HDTV.XviD', 'xvid'),
('720p.HEVC.x265', 'hevc'),
('1080p.HEVC.AC3', 'hevc'),
('10Bit.1080p.DD5.1.H.265', 'hevc'),
('720p.DD5.1.Widescreen.x265', 'hevc'),
])
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(ProperTests)
unittest.TextTestRunner(verbosity=2).run(suite)
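
Like the other suites under tests/, the module wires up its own loader in the __main__ block, so it can be run directly from the tests directory (assuming the usual test_lib fixtures resolve), e.g. `python search_tests.py`, or via a unittest runner of choice.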