From 6e2788fd8ebd33a24f4f0a2959e551aa0903e8dd Mon Sep 17 00:00:00 2001 From: Prinz23 Date: Sat, 27 Jan 2018 14:30:34 +0000 Subject: [PATCH 1/2] Change integrate proper search into recent search. Add webdl types/sources. Add basic tests for get_webdl_type() in properfinder. Add properFinder _get_codec tests. --- .../interfaces/default/config_search.tmpl | 12 - .../default/manage_manageSearches.tmpl | 21 +- sickbeard/__init__.py | 26 +- sickbeard/common.py | 18 ++ sickbeard/databases/mainDB.py | 22 +- sickbeard/db.py | 4 +- sickbeard/network_timezones.py | 5 +- sickbeard/properFinder.py | 234 +++++++++++++----- sickbeard/search_propers.py | 2 - sickbeard/search_queue.py | 72 ++++-- sickbeard/show_updater.py | 8 +- sickbeard/webserve.py | 21 +- tests/search_tests.py | 41 +++ 13 files changed, 357 insertions(+), 129 deletions(-) create mode 100644 tests/search_tests.py diff --git a/gui/slick/interfaces/default/config_search.tmpl b/gui/slick/interfaces/default/config_search.tmpl index 81393717..02da241c 100755 --- a/gui/slick/interfaces/default/config_search.tmpl +++ b/gui/slick/interfaces/default/config_search.tmpl @@ -74,18 +74,6 @@ -
- -
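Note: the "check propers every" interval selector removed above is superseded by a fixed daily cycle set up in sickbeard/__init__.py further down in this patch. A minimal standalone sketch of the new run-delay rule, assuming a Unix timestamp input as returned by properFinder.get_last_proper_search():

    import datetime

    def proper_run_delay_minutes(last_search_timestamp):
        # minutes left of the 24h window since the last recorded proper search
        last_run = datetime.datetime.fromtimestamp(last_search_timestamp)
        time_diff = datetime.timedelta(days=1) - (datetime.datetime.now() - last_run)
        if time_diff < datetime.timedelta(seconds=0):
            return 20  # overdue: run the finder 20 minutes from now
        return int(time_diff.total_seconds() / 60) + 5  # remainder of the window plus a small pad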
diff --git a/gui/slick/interfaces/default/manage_manageSearches.tmpl b/gui/slick/interfaces/default/manage_manageSearches.tmpl index f9dc39e9..4ebd01e5 100644 --- a/gui/slick/interfaces/default/manage_manageSearches.tmpl +++ b/gui/slick/interfaces/default/manage_manageSearches.tmpl @@ -148,7 +148,26 @@
- Proper: $queue_length['proper'] item$sickbeard.helpers.maybe_plural($queue_length['proper'])
+ Proper: $len($queue_length['proper']) item$sickbeard.helpers.maybe_plural($len($queue_length['proper']))
+#if $queue_length['proper']
+
+ + + + #set $row = 0 + #for $cur_item in $queue_length['proper']: + #if $cur_item['recent']: + #set $search_type = 'Recent' + #else + #set $search_type = 'Scheduled' + #end if + + + + #end for + + +#end if
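The table rendered above iterates the reworked queue_length() payload from sickbeard/search_queue.py later in this patch, where 'proper' changes from a plain counter to a list of per-item dicts. A small illustration with hypothetical queue data, showing how the 'recent' flag selects the label:

    queue_length = {'proper': [dict(recent=True), dict(recent=False)]}  # hypothetical snapshot

    for cur_item in queue_length['proper']:
        # mirrors the #if $cur_item['recent'] branch in the template above
        search_type = ('Scheduled', 'Recent')[cur_item['recent']]
        print('%s proper search queued' % search_type)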
diff --git a/sickbeard/__init__.py b/sickbeard/__init__.py index d4caa92a..c876944b 100755 --- a/sickbeard/__init__.py +++ b/sickbeard/__init__.py @@ -37,7 +37,7 @@ sys.path.insert(1, os.path.abspath('../lib')) from sickbeard import helpers, encodingKludge as ek from sickbeard import db, image_cache, logger, naming, metadata, providers, scene_exceptions, scene_numbering, \ scheduler, auto_post_processer, search_queue, search_propers, search_recent, search_backlog, \ - show_queue, show_updater, subtitles, traktChecker, version_checker, indexermapper, classes + show_queue, show_updater, subtitles, traktChecker, version_checker, indexermapper, classes, properFinder from sickbeard.config import CheckSection, check_setting_int, check_setting_str, ConfigMigrator, minimax from sickbeard.common import SD, SKIPPED from sickbeard.databases import mainDB, cache_db, failed_db @@ -210,8 +210,8 @@ USENET_RETENTION = None TORRENT_METHOD = None TORRENT_DIR = None DOWNLOAD_PROPERS = False -CHECK_PROPERS_INTERVAL = None PROPERS_WEBDL_ONEGRP = True +WEBDL_TYPES = [] ALLOW_HIGH_PRIORITY = False NEWZNAB_DATA = '' @@ -595,7 +595,7 @@ def initialize(console_logging=True): global BRANCH, CUR_COMMIT_BRANCH, GIT_REMOTE, CUR_COMMIT_HASH, GIT_PATH, CPU_PRESET, ANON_REDIRECT, \ ENCRYPTION_VERSION, PROXY_SETTING, PROXY_INDEXERS, FILE_LOGGING_PRESET # Search Settings/Episode - global DOWNLOAD_PROPERS, PROPERS_WEBDL_ONEGRP, CHECK_PROPERS_INTERVAL, RECENTSEARCH_FREQUENCY, \ + global DOWNLOAD_PROPERS, PROPERS_WEBDL_ONEGRP, WEBDL_TYPES, RECENTSEARCH_FREQUENCY, \ BACKLOG_DAYS, BACKLOG_NOFULL, BACKLOG_FREQUENCY, USENET_RETENTION, IGNORE_WORDS, REQUIRE_WORDS, \ ALLOW_HIGH_PRIORITY, SEARCH_UNAIRED, UNAIRED_RECENT_SEARCH_ONLY # Search Settings/NZB search @@ -846,9 +846,6 @@ def initialize(console_logging=True): DOWNLOAD_PROPERS = bool(check_setting_int(CFG, 'General', 'download_propers', 1)) PROPERS_WEBDL_ONEGRP = bool(check_setting_int(CFG, 'General', 'propers_webdl_onegrp', 1)) - CHECK_PROPERS_INTERVAL = check_setting_str(CFG, 'General', 'check_propers_interval', '') - if CHECK_PROPERS_INTERVAL not in ('15m', '45m', '90m', '4h', 'daily'): - CHECK_PROPERS_INTERVAL = 'daily' ALLOW_HIGH_PRIORITY = bool(check_setting_int(CFG, 'General', 'allow_high_priority', 1)) @@ -1375,19 +1372,17 @@ def initialize(console_logging=True): prevent_cycle_run=searchQueueScheduler.action.is_standard_backlog_in_progress) propers_searcher = search_propers.ProperSearcher() - item = [(k, n, v) for (k, n, v) in propers_searcher.search_intervals if k == CHECK_PROPERS_INTERVAL] - if item: - update_interval = datetime.timedelta(minutes=item[0][2]) - run_at = None + last_proper_search = datetime.datetime.fromtimestamp(properFinder.get_last_proper_search()) + time_diff = datetime.timedelta(days=1) - (datetime.datetime.now() - last_proper_search) + if time_diff < datetime.timedelta(seconds=0): + properdelay = 20 else: - update_interval = datetime.timedelta(hours=1) - run_at = datetime.time(hour=1) # 1 AM + properdelay = helpers.tryInt((time_diff.total_seconds() / 60) + 5, 20) properFinderScheduler = scheduler.Scheduler( propers_searcher, - cycleTime=update_interval, - run_delay=update_interval, - start_time=run_at, + cycleTime=datetime.timedelta(days=1), + run_delay=datetime.timedelta(minutes=properdelay), threadName='FINDPROPERS', prevent_cycle_run=searchQueueScheduler.action.is_propersearch_in_progress) @@ -1579,7 +1574,6 @@ def save_config(): new_config['General']['update_frequency'] = int(UPDATE_FREQUENCY) new_config['General']['download_propers'] = 
int(DOWNLOAD_PROPERS) new_config['General']['propers_webdl_onegrp'] = int(PROPERS_WEBDL_ONEGRP) - new_config['General']['check_propers_interval'] = CHECK_PROPERS_INTERVAL new_config['General']['allow_high_priority'] = int(ALLOW_HIGH_PRIORITY) new_config['General']['recentsearch_startup'] = int(RECENTSEARCH_STARTUP) new_config['General']['backlog_nofull'] = int(BACKLOG_NOFULL) diff --git a/sickbeard/common.py b/sickbeard/common.py index 583c7d96..75a57104 100644 --- a/sickbeard/common.py +++ b/sickbeard/common.py @@ -546,6 +546,24 @@ class neededQualities(object): if isinstance(v, bool) and True is v: self.need_sd = self.need_hd = self.need_uhd = self.need_webdl = True + def all_show_qualities_needed(self, show): + from sickbeard.tv import TVShow + if isinstance(show, TVShow): + init, upgrade = Quality.splitQuality(show.quality) + all_qual = set(init + upgrade) + need_sd = need_hd = need_uhd = need_webdl = False + for wanted_qualities in all_qual: + if not need_sd and wanted_qualities <= neededQualities.max_sd: + need_sd = True + if not need_hd and wanted_qualities in neededQualities.hd_qualities: + need_hd = True + if not need_webdl and wanted_qualities in neededQualities.webdl_qualities: + need_webdl = True + if not need_uhd and wanted_qualities > neededQualities.max_hd: + need_uhd = True + return self.need_sd == need_sd and self.need_hd == need_hd and self.need_webdl == need_webdl and \ + self.need_uhd == need_uhd + def check_needed_types(self, show): if getattr(show, 'is_anime', False): self.need_anime = True diff --git a/sickbeard/databases/mainDB.py b/sickbeard/databases/mainDB.py index 9fd845e2..099b4c34 100644 --- a/sickbeard/databases/mainDB.py +++ b/sickbeard/databases/mainDB.py @@ -27,7 +27,7 @@ from sickbeard import encodingKludge as ek from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException MIN_DB_VERSION = 9 # oldest db version we support migrating from -MAX_DB_VERSION = 20006 +MAX_DB_VERSION = 20008 class MainSanityCheck(db.DBSanityCheck): @@ -1258,3 +1258,23 @@ class AddFlagTable(db.SchemaUpgrade): self.setDBVersion(20006) return self.checkDBVersion() + + +# 20006 -> 20007 +class DBIncreaseTo20007(db.SchemaUpgrade): + def execute(self): + + logger.log(u'Bumping database version') + + self.setDBVersion(20007) + return self.checkDBVersion() + + +# 20007 -> 20008 +class AddWebdlTypesTable(db.SchemaUpgrade): + def execute(self): + db.backup_database('sickbeard.db', self.checkDBVersion()) + self.connection.action('CREATE TABLE webdl_types (dname TEXT NOT NULL , regex TEXT NOT NULL )') + + self.setDBVersion(20008) + return self.checkDBVersion() diff --git a/sickbeard/db.py b/sickbeard/db.py index 75495638..fa439faa 100644 --- a/sickbeard/db.py +++ b/sickbeard/db.py @@ -512,7 +512,9 @@ def MigrationCode(myDB): 20002: sickbeard.mainDB.AddTvShowTags, 20003: sickbeard.mainDB.ChangeMapIndexer, 20004: sickbeard.mainDB.AddShowNotFoundCounter, - 20005: sickbeard.mainDB.AddFlagTable + 20005: sickbeard.mainDB.AddFlagTable, + 20006: sickbeard.mainDB.DBIncreaseTo20007, + 20007: sickbeard.mainDB.AddWebdlTypesTable, # 20002: sickbeard.mainDB.AddCoolSickGearFeature3, } diff --git a/sickbeard/network_timezones.py b/sickbeard/network_timezones.py index 22dc1c79..a5076eb8 100644 --- a/sickbeard/network_timezones.py +++ b/sickbeard/network_timezones.py @@ -226,7 +226,10 @@ def update_network_dict(): try: for line in url_data.splitlines(): - (key, val) = line.decode('utf-8').strip().rsplit(u':', 1) + try: + (key, val) = line.decode('utf-8').strip().rsplit(u':', 
1) + except (StandardError, Exception): + continue if key is None or val is None: continue d[key] = val diff --git a/sickbeard/properFinder.py b/sickbeard/properFinder.py index fb01453d..a9780705 100644 --- a/sickbeard/properFinder.py +++ b/sickbeard/properFinder.py @@ -27,27 +27,28 @@ import sickbeard from sickbeard import db, exceptions, helpers, history, logger, search, show_name_helpers from sickbeard import encodingKludge as ek -from sickbeard.common import DOWNLOADED, SNATCHED_ANY, SNATCHED_PROPER, Quality, ARCHIVED, FAILED +from sickbeard.common import DOWNLOADED, SNATCHED_ANY, SNATCHED_PROPER, Quality, ARCHIVED, FAILED, neededQualities from sickbeard.exceptions import ex, MultipleShowObjectsException from sickbeard import failed_history from sickbeard.history import dateFormat +from sickbeard.sbdatetime import sbdatetime from name_parser.parser import NameParser, InvalidNameException, InvalidShowException -def search_propers(): +def search_propers(proper_list=None): if not sickbeard.DOWNLOAD_PROPERS: return - logger.log(u'Beginning search for new propers') + logger.log(('Checking propers from Recent search', 'Beginning search for new propers')[None is proper_list]) age_shows, age_anime = sickbeard.BACKLOG_DAYS + 2, 14 aired_since_shows = datetime.datetime.today() - datetime.timedelta(days=age_shows) aired_since_anime = datetime.datetime.today() - datetime.timedelta(days=age_anime) recent_shows, recent_anime = _recent_history(aired_since_shows, aired_since_anime) if recent_shows or recent_anime: - propers = _get_proper_list(aired_since_shows, recent_shows, recent_anime) + propers = _get_proper_list(aired_since_shows, recent_shows, recent_anime, proper_list=proper_list) if propers: _download_propers(propers) @@ -55,52 +56,59 @@ def search_propers(): logger.log(u'No downloads or snatches found for the last %s%s days to use for a propers search' % (age_shows, ('', ' (%s for anime)' % age_anime)[helpers.has_anime()])) - _set_last_proper_search(datetime.datetime.today().toordinal()) - run_at = '' - proper_sch = sickbeard.properFinderScheduler - if None is proper_sch.start_time: - run_in = proper_sch.lastRun + proper_sch.cycleTime - datetime.datetime.now() - run_at = u', next check ' - if datetime.timedelta() > run_in: - run_at += u'imminent' - else: - hours, remainder = divmod(run_in.seconds, 3600) - minutes, seconds = divmod(remainder, 60) - run_at += u'in approx. ' + ('%dh, %dm' % (hours, minutes) if 0 < hours else '%dm, %ds' % (minutes, seconds)) + if None is proper_list: + _set_last_proper_search(datetime.datetime.now()) + + proper_sch = sickbeard.properFinderScheduler + if None is proper_sch.start_time: + run_in = proper_sch.lastRun + proper_sch.cycleTime - datetime.datetime.now() + run_at = u', next check ' + if datetime.timedelta() > run_in: + run_at += u'imminent' + else: + hours, remainder = divmod(run_in.seconds, 3600) + minutes, seconds = divmod(remainder, 60) + run_at += u'in approx. 
' + ('%dh, %dm' % (hours, minutes) if 0 < hours else + '%dm, %ds' % (minutes, seconds)) + else: + run_at = ' send from recent search' logger.log(u'Completed the search for new propers%s' % run_at) -def get_old_proper_level(showObj, indexer, indexerid, season, episodes, old_status, new_quality, +def get_old_proper_level(show_obj, indexer, indexerid, season, episodes, old_status, new_quality, extra_no_name, version, is_anime=False): level = 0 is_internal = False codec = '' + rel_name = None if old_status not in SNATCHED_ANY: level = Quality.get_proper_level(extra_no_name, version, is_anime) - elif showObj: - myDB = db.DBConnection() - np = NameParser(False, showObj=showObj) + elif show_obj: + my_db = db.DBConnection() + np = NameParser(False, showObj=show_obj) for episode in episodes: - result = myDB.select('SELECT resource FROM history WHERE showid = ? AND season = ? AND episode = ? AND ' - '(' + ' OR '.join("action LIKE '%%%02d'" % x for x in SNATCHED_ANY) + ') ' - 'ORDER BY date DESC LIMIT 1', - [indexerid, season, episode]) + result = my_db.select('SELECT resource FROM history WHERE showid = ? AND season = ? AND episode = ? AND ' + '(' + ' OR '.join("action LIKE '%%%02d'" % x for x in SNATCHED_ANY) + ') ' + 'ORDER BY date DESC LIMIT 1', + [indexerid, season, episode]) if not result or not isinstance(result[0]['resource'], basestring) or not result[0]['resource']: continue - nq = Quality.sceneQuality(result[0]['resource'], showObj.is_anime) + nq = Quality.sceneQuality(result[0]['resource'], show_obj.is_anime) if nq != new_quality: continue try: p = np.parse(result[0]['resource']) except (StandardError, Exception): continue - level = Quality.get_proper_level(p.extra_info_no_name(), p.version, showObj.is_anime) + level = Quality.get_proper_level(p.extra_info_no_name(), p.version, show_obj.is_anime) + extra_no_name = p.extra_info_no_name() + rel_name = result[0]['resource'] is_internal = p.extra_info_no_name() and re.search(r'\binternal\b', p.extra_info_no_name(), flags=re.I) codec = _get_codec(p.extra_info_no_name()) break - return level, is_internal, codec + return level, is_internal, codec, extra_no_name, rel_name def _get_codec(extra_info_no_name): @@ -110,12 +118,62 @@ def _get_codec(extra_info_no_name): return '264' elif re.search(r'\bxvid\b', extra_info_no_name, flags=re.I): return 'xvid' - elif re.search(r'\b[xh]265|hevc\b', extra_info_no_name, flags=re.I): + elif re.search(r'\b[xh]\W?265|hevc\b', extra_info_no_name, flags=re.I): return 'hevc' return '' -def _get_proper_list(aired_since_shows, recent_shows, recent_anime): +def get_webdl_type(extra_info_no_name, rel_name): + if not sickbeard.WEBDL_TYPES: + load_webdl_types() + + for t in sickbeard.WEBDL_TYPES: + try: + if re.search(r'\b%s\b' % t[1], extra_info_no_name, flags=re.I): + return t[0] + except (StandardError, Exception): + continue + + return ('webdl', 'webrip')[None is re.search(r'\bweb.?dl\b', rel_name, flags=re.I)] + + +def load_webdl_types(): + new_types = [] + default_types = [('Amazon', r'AMZN|AMAZON'), ('Netflix', r'NETFLIX|NF'), ('Hulu', r'HULU')] + url = 'https://raw.githubusercontent.com/SickGear/sickgear.extdata/master/SickGear/webdl_types.txt' + url_data = helpers.getURL(url) + if url_data: + try: + for line in url_data.splitlines(): + try: + (key, val) = line.decode('utf-8').strip().split(u'::', 1) + except (StandardError, Exception): + continue + if key is None or val is None: + continue + new_types.append((key, val)) + except (IOError, OSError): + pass + + my_db = db.DBConnection() + sql_results = 
my_db.select('SELECT * FROM webdl_types') + old_types = [(r['dname'], r['regex']) for r in sql_results] + cl = [] + for nt in new_types: + if nt not in old_types: + cl.append(['REPLACE INTO webdl_types (dname, regex) VALUES (?,?)', [nt[0], nt[1]]]) + + for ot in old_types: + if ot not in new_types: + cl.append(['DELETE FROM webdl_types WHERE dname = ? AND regex = ?', [ot[0], ot[1]]]) + + if cl: + my_db.mass_action(cl) + + sickbeard.WEBDL_TYPES = new_types + default_types + + +def _get_proper_list(aired_since_shows, recent_shows, recent_anime, proper_list=None): propers = {} # for each provider get a list of the @@ -124,22 +182,28 @@ def _get_proper_list(aired_since_shows, recent_shows, recent_anime): for cur_provider in providers: if not recent_anime and cur_provider.anime_only: continue - threading.currentThread().name = orig_thread_name + ' :: [' + cur_provider.name + ']' - logger.log(u'Searching for new PROPER releases') + if None is not proper_list: + found_propers = proper_list.get(cur_provider.get_id(), []) + if not found_propers: + continue + else: + threading.currentThread().name = orig_thread_name + ' :: [' + cur_provider.name + ']' - try: - found_propers = cur_provider.find_propers(search_date=aired_since_shows, shows=recent_shows, - anime=recent_anime) - except exceptions.AuthException as e: - logger.log(u'Authentication error: ' + ex(e), logger.ERROR) - continue - except Exception as e: - logger.log(u'Error while searching ' + cur_provider.name + ', skipping: ' + ex(e), logger.ERROR) - logger.log(traceback.format_exc(), logger.ERROR) - continue - finally: - threading.currentThread().name = orig_thread_name + logger.log(u'Searching for new PROPER releases') + + try: + found_propers = cur_provider.find_propers(search_date=aired_since_shows, shows=recent_shows, + anime=recent_anime) + except exceptions.AuthException as e: + logger.log(u'Authentication error: ' + ex(e), logger.ERROR) + continue + except Exception as e: + logger.log(u'Error while searching ' + cur_provider.name + ', skipping: ' + ex(e), logger.ERROR) + logger.log(traceback.format_exc(), logger.ERROR) + continue + finally: + threading.currentThread().name = orig_thread_name # if they haven't been added by a different provider than add the proper to the list count = 0 @@ -162,7 +226,7 @@ def _get_proper_list(aired_since_shows, recent_shows, recent_anime): parse_result.is_anime, check_is_repack=True) x.is_internal = parse_result.extra_info_no_name() and \ - re.search(r'\binternal\b', parse_result.extra_info_no_name(), flags=re.I) + re.search(r'\binternal\b', parse_result.extra_info_no_name(), flags=re.I) x.codec = _get_codec(parse_result.extra_info_no_name()) propers[name] = x count += 1 @@ -255,11 +319,12 @@ def _get_proper_list(aired_since_shows, recent_shows, recent_anime): except (StandardError, Exception): extra_info = None - old_proper_level, old_is_internal, old_codec = get_old_proper_level(parse_result.show, cur_proper.indexer, - cur_proper.indexerid, cur_proper.season, - parse_result.episode_numbers, old_status, - cur_proper.quality, extra_info, - cur_proper.version, cur_proper.is_anime) + old_proper_level, old_is_internal, old_codec, old_extra_no_name, old_name = \ + get_old_proper_level(parse_result.show, cur_proper.indexer, cur_proper.indexerid, cur_proper.season, + parse_result.episode_numbers, old_status, cur_proper.quality, extra_info, + cur_proper.version, cur_proper.is_anime) + + old_name = (old_name, sql_results[0]['release_name'])[old_name in ('', None)] if cur_proper.proper_level < old_proper_level: 
continue elif cur_proper.proper_level == old_proper_level: @@ -273,11 +338,20 @@ def _get_proper_list(aired_since_shows, recent_shows, recent_anime): log_same_grp = 'Skipping proper from release group: [%s], does not match existing release group: [%s] for [%s]'\ % (cur_proper.release_group, old_release_group, cur_proper.name) + is_web = (old_quality in (Quality.HDWEBDL, Quality.FULLHDWEBDL, Quality.UHD4KWEB) or + (old_quality == Quality.SDTV and re.search(r'\Wweb.?(dl|rip|.[hx]26[45])\W', + str(sql_results[0]['release_name']), re.I))) + + if is_web: + old_webdl_type = get_webdl_type(old_extra_no_name, old_name) + new_webdl_type = get_webdl_type(cur_proper.extra_info_no_name, cur_proper.name) + if old_webdl_type != new_webdl_type: + logger.log('Skipping proper webdl source: [%s], does not match existing webdl source: [%s] for [%s]' + % (old_webdl_type, new_webdl_type, cur_proper.name), logger.DEBUG) + continue + # for webldls, prevent propers from different groups - if sickbeard.PROPERS_WEBDL_ONEGRP and \ - (old_quality in (Quality.HDWEBDL, Quality.FULLHDWEBDL, Quality.UHD4KWEB) or - (old_quality == Quality.SDTV and re.search(r'\Wweb.?(dl|rip|.[hx]26[45])\W', str(sql_results[0]['release_name']), re.I))) and \ - cur_proper.release_group != old_release_group: + if sickbeard.PROPERS_WEBDL_ONEGRP and is_web and cur_proper.release_group != old_release_group: logger.log(log_same_grp, logger.DEBUG) continue @@ -375,6 +449,46 @@ def _download_propers(proper_list): search.snatch_episode(result, SNATCHED_PROPER) +def get_needed_qualites(needed=None): + if not isinstance(needed, neededQualities): + needed = neededQualities() + if not sickbeard.DOWNLOAD_PROPERS or needed.all_needed: + return needed + + age_shows, age_anime = sickbeard.BACKLOG_DAYS + 2, 14 + aired_since_shows = datetime.datetime.today() - datetime.timedelta(days=age_shows) + aired_since_anime = datetime.datetime.today() - datetime.timedelta(days=age_anime) + + my_db = db.DBConnection() + sql_results = my_db.select( + 'SELECT DISTINCT s.indexer, s.indexer_id, e.season, e.episode FROM history as h' + + ' INNER JOIN tv_episodes AS e ON (h.showid == e.showid AND h.season == e.season AND h.episode == e.episode)' + + ' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' + + ' WHERE h.date >= %s' % min(aired_since_shows, aired_since_anime).strftime(dateFormat) + + ' AND (%s)' % ' OR '.join(['h.action LIKE "%%%02d"' % x for x in SNATCHED_ANY + [DOWNLOADED, FAILED]]) + ) + + for sql_episode in sql_results: + if needed.all_needed: + break + try: + show = helpers.find_show_by_id( + sickbeard.showList, {int(sql_episode['indexer']): int(sql_episode['indexer_id'])}) + except MultipleShowObjectsException: + continue + if show: + needed.check_needed_types(show) + if needed.all_show_qualities_needed(show) or needed.all_qualities_needed: + continue + ep_obj = show.getEpisode(season=sql_episode['season'], episode=sql_episode['episode']) + if ep_obj: + ep_status, ep_quality = Quality.splitCompositeStatus(ep_obj.status) + if ep_status in SNATCHED_ANY + [DOWNLOADED, ARCHIVED]: + needed.check_needed_qualities([ep_quality]) + + return needed + + def _recent_history(aired_since_shows, aired_since_anime): recent_shows, recent_anime = [], [] @@ -418,19 +532,23 @@ def _set_last_proper_search(when): if 0 == len(sql_results): my_db.action('INSERT INTO info (last_backlog, last_indexer, last_proper_search) VALUES (?,?,?)', - [0, 0, str(when)]) + [0, 0, sbdatetime.totimestamp(when)]) else: - my_db.action('UPDATE info SET last_proper_search=%s' % when) + 
my_db.action('UPDATE info SET last_proper_search=%s' % sbdatetime.totimestamp(when)) -def _get_last_proper_search(): +def next_proper_timeleft(): + return sickbeard.properFinderScheduler.timeLeft() + + +def get_last_proper_search(): my_db = db.DBConnection() sql_results = my_db.select('SELECT * FROM info') try: - last_proper_search = datetime.date.fromordinal(int(sql_results[0]['last_proper_search'])) + last_proper_search = int(sql_results[0]['last_proper_search']) except (StandardError, Exception): - return datetime.date.fromordinal(1) + return 1 return last_proper_search diff --git a/sickbeard/search_propers.py b/sickbeard/search_propers.py index 923be08c..51b57054 100644 --- a/sickbeard/search_propers.py +++ b/sickbeard/search_propers.py @@ -26,8 +26,6 @@ class ProperSearcher: def __init__(self): self.lock = threading.Lock() self.amActive = False - self.search_intervals = [('daily', '24 hours', 24 * 60), ('4h', '4 hours', 4 * 60), - ('90m', '90 mins', 90), ('45m', '45 mins', 45), ('15m', '15 mins', 15)] @staticmethod def check_paused(): diff --git a/sickbeard/search_queue.py b/sickbeard/search_queue.py index 7a181522..571b8050 100644 --- a/sickbeard/search_queue.py +++ b/sickbeard/search_queue.py @@ -21,11 +21,14 @@ from __future__ import with_statement import traceback import threading import datetime +import re import sickbeard from sickbeard import db, logger, common, exceptions, helpers, network_timezones, generic_queue, search, \ failed_history, history, ui, properFinder from sickbeard.search import wanted_episodes, get_aired_in_season, set_wanted_aired +from sickbeard.classes import Proper +from sickbeard.indexers.indexer_config import INDEXER_TVDB search_queue_lock = threading.Lock() @@ -109,7 +112,11 @@ class SearchQueue(generic_queue.GenericQueue): return self._is_in_progress(RecentSearchQueueItem) def is_propersearch_in_progress(self): - return self._is_in_progress(ProperSearchQueueItem) + with self.lock: + for cur_item in self.queue + [self.currentItem]: + if isinstance(cur_item, ProperSearchQueueItem) and None is cur_item.propers: + return True + return False def is_standard_backlog_in_progress(self): with self.lock: @@ -141,25 +148,25 @@ class SearchQueue(generic_queue.GenericQueue): return message def queue_length(self): - length = {'backlog': [], 'recent': 0, 'manual': [], 'failed': [], 'proper': 0} + length = {'backlog': [], 'recent': 0, 'manual': [], 'failed': [], 'proper': []} with self.lock: for cur_item in [self.currentItem] + self.queue: if isinstance(cur_item, RecentSearchQueueItem): length['recent'] += 1 elif isinstance(cur_item, BacklogQueueItem): - length['backlog'].append({'indexerid': cur_item.show.indexerid, 'indexer': cur_item.show.indexer, - 'name': cur_item.show.name, 'segment': cur_item.segment, - 'standard_backlog': cur_item.standard_backlog, - 'limited_backlog': cur_item.limited_backlog, 'forced': cur_item.forced, - 'torrent_only': cur_item.torrent_only}) + length['backlog'] += [dict(indexerid=cur_item.show.indexerid, indexer=cur_item.show.indexer, + name=cur_item.show.name, segment=cur_item.segment, + standard_backlog=cur_item.standard_backlog, + limited_backlog=cur_item.limited_backlog, forced=cur_item.forced, + torrent_only=cur_item.torrent_only)] elif isinstance(cur_item, ProperSearchQueueItem): - length['proper'] += 1 + length['proper'] += [dict(recent=None is not cur_item.propers)] elif isinstance(cur_item, ManualSearchQueueItem): - length['manual'].append({'indexerid': cur_item.show.indexerid, 'indexer': cur_item.show.indexer, - 'name': 
cur_item.show.name, 'segment': cur_item.segment}) + length['manual'] += [dict(indexerid=cur_item.show.indexerid, indexer=cur_item.show.indexer, + name=cur_item.show.name, segment=cur_item.segment)] elif isinstance(cur_item, FailedQueueItem): - length['failed'].append({'indexerid': cur_item.show.indexerid, 'indexer': cur_item.show.indexer, - 'name': cur_item.show.name, 'segment': cur_item.segment}) + length['failed'] += [dict(indexerid=cur_item.show.indexerid, indexer=cur_item.show.indexer, + name=cur_item.show.name, segment=cur_item.segment)] return length def add_item(self, item): @@ -210,7 +217,11 @@ class RecentSearchQueueItem(generic_queue.QueueItem): self.episodes.extend(wanted_eps) + if sickbeard.DOWNLOAD_PROPERS: + properFinder.get_needed_qualites(needed) + self.update_providers(needed=needed) + self._check_for_propers(needed) if not self.episodes: logger.log(u'No search of cache for episodes required') @@ -244,6 +255,33 @@ class RecentSearchQueueItem(generic_queue.QueueItem): finally: self.finish() + @staticmethod + def _check_for_propers(needed): + if not sickbeard.DOWNLOAD_PROPERS: + return + + propers = {} + my_db = db.DBConnection('cache.db') + sql_results = my_db.select('SELECT * FROM provider_cache') + re_p = (r'\brepack|proper|real\b', r'\brepack|proper|real|v[1-5]\b')[needed.need_anime] + + proper_regex = re.compile(re_p, flags=re.I) + + for s in sql_results: + if proper_regex.search(s['name']): + try: + show = helpers.find_show_by_id(sickbeard.showList, {INDEXER_TVDB: int(s['indexerid'])}) + except (StandardError, Exception): + continue + if show: + propers.setdefault(s['provider'], []).append( + Proper(s['name'], s['url'], datetime.datetime.fromtimestamp(s['time']), show, parsed_show=show)) + + if propers: + logger.log('Found Proper/Repack/Real in Recent Search, sending data to properfinder') + propersearch_queue_item = sickbeard.search_queue.ProperSearchQueueItem(propers=propers) + sickbeard.searchQueueScheduler.action.add_item(propersearch_queue_item) + @staticmethod def _change_missing_episodes(): if not network_timezones.network_dict: @@ -326,7 +364,8 @@ class RecentSearchQueueItem(generic_queue.QueueItem): threads[-1].start() if not len(providers): - logger.log('No NZB/Torrent providers in Media Providers/Options are enabled to match recent episodes', logger.WARNING) + logger.log('No NZB/Torrent providers in Media Providers/Options are enabled to match recent episodes', + logger.WARNING) if threads: # wait for all threads to finish @@ -337,16 +376,17 @@ class RecentSearchQueueItem(generic_queue.QueueItem): class ProperSearchQueueItem(generic_queue.QueueItem): - def __init__(self): + def __init__(self, propers=None): generic_queue.QueueItem.__init__(self, 'Proper Search', PROPER_SEARCH) - self.priority = generic_queue.QueuePriorities.HIGH + self.priority = (generic_queue.QueuePriorities.VERYHIGH, generic_queue.QueuePriorities.HIGH)[None is propers] + self.propers = propers self.success = None def run(self): generic_queue.QueueItem.run(self) try: - properFinder.search_propers() + properFinder.search_propers(self.propers) finally: self.finish() diff --git a/sickbeard/show_updater.py b/sickbeard/show_updater.py index 8069fca5..1d2fc27a 100644 --- a/sickbeard/show_updater.py +++ b/sickbeard/show_updater.py @@ -20,7 +20,7 @@ import datetime import traceback import sickbeard -from sickbeard import logger, exceptions, ui, db, network_timezones, failed_history +from sickbeard import logger, exceptions, ui, db, network_timezones, failed_history, properFinder from 
sickbeard.exceptions import ex @@ -43,6 +43,12 @@ class ShowUpdater: logger.log('network timezone update error', logger.ERROR) logger.log(traceback.format_exc(), logger.ERROR) + # refresh webdl types + try: + properFinder.load_webdl_types() + except (StandardError, Exception): + logger.log('error loading webdl_types', logger.DEBUG) + # update xem id lists try: sickbeard.scene_exceptions.get_xem_ids() diff --git a/sickbeard/webserve.py b/sickbeard/webserve.py index 50b4e7cf..4d9419da 100644 --- a/sickbeard/webserve.py +++ b/sickbeard/webserve.py @@ -5024,7 +5024,6 @@ class ConfigSearch(Config): for show in sickbeard.showList if show.rls_require_words and show.rls_require_words.strip()] t.using_rls_require_words.sort(lambda x, y: cmp(x[1], y[1]), reverse=False) - t.propers_intervals = search_propers.ProperSearcher().search_intervals t.using_regex = False try: from sickbeard.name_parser.parser import regex @@ -5038,7 +5037,7 @@ class ConfigSearch(Config): nzbget_category=None, nzbget_priority=None, nzbget_host=None, nzbget_use_https=None, backlog_days=None, backlog_frequency=None, search_unaired=None, unaired_recent_search_only=None, recentsearch_frequency=None, nzb_method=None, torrent_method=None, usenet_retention=None, - download_propers=None, propers_webdl_onegrp=None, check_propers_interval=None, + download_propers=None, propers_webdl_onegrp=None, allow_high_priority=None, torrent_dir=None, torrent_username=None, torrent_password=None, torrent_host=None, torrent_label=None, torrent_path=None, torrent_verify_cert=None, @@ -5077,24 +5076,6 @@ class ConfigSearch(Config): config.change_DOWNLOAD_PROPERS(config.checkbox_to_value(download_propers)) sickbeard.PROPERS_WEBDL_ONEGRP = config.checkbox_to_value(propers_webdl_onegrp) - if sickbeard.CHECK_PROPERS_INTERVAL != check_propers_interval: - sickbeard.CHECK_PROPERS_INTERVAL = check_propers_interval - - if sickbeard.DOWNLOAD_PROPERS: - proper_sch = sickbeard.properFinderScheduler - item = [(k, n, v) for (k, n, v) in proper_sch.action.search_intervals if k == check_propers_interval] - if item and None is proper_sch.start_time: - interval = datetime.timedelta(minutes=item[0][2]) - run_in = proper_sch.lastRun + interval - datetime.datetime.now() - proper_sch.cycleTime = interval - - run_at = 'imminent' - if datetime.timedelta() < run_in: - hours, remainder = divmod(run_in.seconds, 3600) - minutes, seconds = divmod(remainder, 60) - run_at = u'in approx. 
' + ('%dh, %dm' % (hours, minutes) if 0 < hours else - '%dm, %ds' % (minutes, seconds)) - logger.log(u'Change search PROPERS interval, next check %s' % run_at) sickbeard.SEARCH_UNAIRED = bool(config.checkbox_to_value(search_unaired)) sickbeard.UNAIRED_RECENT_SEARCH_ONLY = bool(config.checkbox_to_value(unaired_recent_search_only, value_off=1, value_on=0)) diff --git a/tests/search_tests.py b/tests/search_tests.py new file mode 100644 index 00000000..c4a2aad2 --- /dev/null +++ b/tests/search_tests.py @@ -0,0 +1,41 @@ +import unittest + +from sickbeard import properFinder + +import sickbeard +import test_lib as test + +sickbeard.SYS_ENCODING = 'UTF-8' + + +class ProperTests(test.SickbeardTestDBCase): + def check_webdl_type(self, cases): + for c in cases: + self.assertEqual(properFinder.get_webdl_type(*c[0]), c[1]) + + def check_get_codec(self, cases): + for c in cases: + self.assertEqual(properFinder._get_codec(c[0]), c[1]) + + def test_webdl_type(self): + self.check_webdl_type([ + (('1080p.WEB.x264', 'The.Show.Name.S04E10.1080p.WEB.x264-GROUP'), 'webrip'), + (('720p.WEB-DL.DD5.1.H.264', 'The.Show.Name.720p.WEB-DL.DD5.1.H.264-GROUP'), 'webdl'), + (('1080p.AMZN.WEB-DL.DD5.1.H.264', 'The.Show.Name.1080p.AMZN.WEB-DL.DD5.1.H.264-GROUP'), 'Amazon'), + ]) + + def test_get_codec(self): + self.check_get_codec([ + ('1080p.WEB.x264', '264'), + ('720p.WEB.h264', '264'), + ('HDTV.XviD', 'xvid'), + ('720p.HEVC.x265', 'hevc'), + ('1080p.HEVC.AC3', 'hevc'), + ('10Bit.1080p.DD5.1.H.265', 'hevc'), + ('720p.DD5.1.Widescreen.x265', 'hevc'), + ]) + + +if __name__ == '__main__': + suite = unittest.TestLoader().loadTestsFromTestCase(ProperTests) + unittest.TextTestRunner(verbosity=2).run(suite) From 56d8fffb9f5a78050e04f3674430f3215a427188 Mon Sep 17 00:00:00 2001 From: JackDandy Date: Sat, 27 Jan 2018 16:17:21 +0000 Subject: [PATCH 2/2] Change UI footer tweaks + only display footer propers time if option is actually enabled. --- CHANGES.md | 3 ++ gui/slick/css/style.css | 6 +-- gui/slick/interfaces/default/inc_bottom.tmpl | 39 ++++++++++++------- .../default/manage_manageSearches.tmpl | 2 +- gui/slick/js/config.js | 5 +++ sickbeard/properFinder.py | 8 ++-- sickbeard/search_queue.py | 2 +- sickbeard/webserve.py | 27 ++++++++----- 8 files changed, 61 insertions(+), 31 deletions(-) diff --git a/CHANGES.md b/CHANGES.md index cfd99641..c9455fa7 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -8,6 +8,9 @@ * Add failure handling, skip provider for x hour(s) depending on count of failures * Add detection of Too Many Requests (Supporting providers UC and BTN) * Add footer icon button to switch time layouts +* Add performance gains for proper search by integrating it into recent search +* Add the once per day proper finder time to footer, this process catches any propers missed during recent searches +* Add ability to differentiate webdl/rip sources so overwriting propers is always done from the same source (e.g. 
AMZN) [develop changelog] diff --git a/gui/slick/css/style.css b/gui/slick/css/style.css index 3542c285..c3c1c575 100644 --- a/gui/slick/css/style.css +++ b/gui/slick/css/style.css @@ -643,7 +643,7 @@ inc_bottom.tmpl opacity:0.4;filter:alpha(opacity=40); float:none; display:inline-block; - margin:0 0 -2px 0; + margin:0 0 -1px 2px; height:12px; width:14px } @@ -653,11 +653,11 @@ inc_bottom.tmpl } .footer .icon-glyph.timeleft, .footer .icon-glyph.time:hover{ - background-position:-48px -25px + background-position:-49px -25px } .footer .icon-glyph.time, .footer .icon-glyph.timeleft:hover{ - background-position:-192px -121px + background-position:-193px -121px } /* ======================================================================= diff --git a/gui/slick/interfaces/default/inc_bottom.tmpl b/gui/slick/interfaces/default/inc_bottom.tmpl index 36acea1d..6b8d336e 100644 --- a/gui/slick/interfaces/default/inc_bottom.tmpl +++ b/gui/slick/interfaces/default/inc_bottom.tmpl @@ -69,13 +69,13 @@ if min_output: % (localRoot, str(ep_snatched)) )[0 < ep_snatched] %> / $ep_total episodes downloaded $ep_percentage -#for i, event in enumerate($MainHandler.getFooterTime(ajax_layout=False)) +#for i, event in enumerate($MainHandler.getFooterTime(change_layout=False, json_dump=False)) #for k, v in event.items() #set info = re.findall('(.*)_(timeleft|time)', k)[0] #if not i -
+
next connect for... #end if - | $info[0].replace('-', ' '): $v + | $info[0].replace('-', ' '): $v #end for #end for #if diskfree @@ -106,18 +106,31 @@ if min_output:
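Aside, not part of either patch: the web-dl source matching exercised by the tests added in the first patch reduces to the rule sketched below. The default pairs are the hard-coded ones from load_webdl_types() (the full list is fetched from the sickgear.extdata repo and cached in the new webdl_types table); the function name webdl_type here is illustrative only:

    import re

    DEFAULT_TYPES = [('Amazon', r'AMZN|AMAZON'), ('Netflix', r'NETFLIX|NF'), ('Hulu', r'HULU')]

    def webdl_type(extra_info_no_name, rel_name):
        # a named source tag in the extra info wins, matched case-insensitively
        for dname, pattern in DEFAULT_TYPES:
            if re.search(r'\b%s\b' % pattern, extra_info_no_name, flags=re.I):
                return dname
        # otherwise classify by whether the release name carries a web-dl marker
        return ('webdl', 'webrip')[None is re.search(r'\bweb.?dl\b', rel_name, flags=re.I)]

    assert 'webrip' == webdl_type('1080p.WEB.x264', 'The.Show.Name.S04E10.1080p.WEB.x264-GROUP')
    assert 'webdl' == webdl_type('720p.WEB-DL.DD5.1.H.264', 'The.Show.Name.720p.WEB-DL.DD5.1.H.264-GROUP')
    assert 'Amazon' == webdl_type('1080p.AMZN.WEB-DL.DD5.1.H.264', 'The.Show.Name.1080p.AMZN.WEB-DL.DD5.1.H.264-GROUP')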