Merge branch 'feature/ChangeSchedulers' into dev

This commit is contained in:
JackDandy 2023-04-27 12:49:50 +01:00
commit 1579df04f1
29 changed files with 929 additions and 796 deletions

View file

@ -10,6 +10,8 @@
* Update SimpleJSON 3.18.1 (c891b95) to 3.19.1 (aeb63ee) * Update SimpleJSON 3.18.1 (c891b95) to 3.19.1 (aeb63ee)
* Update Tornado Web Server 6.3.0 (7186b86) to 6.3.1 (419838b) * Update Tornado Web Server 6.3.0 (7186b86) to 6.3.1 (419838b)
* Update urllib3 1.26.14 (a06c05c) to 1.26.15 (25cca389) * Update urllib3 1.26.14 (a06c05c) to 1.26.15 (25cca389)
* Change add jobs to centralise scheduler activities
* Change refactor scene_exceptions
### 3.28.0 (2023-04-12 13:05:00 UTC) ### 3.28.0 (2023-04-12 13:05:00 UTC)

View file

@ -555,9 +555,9 @@ class SickGear(object):
name_cache.build_name_cache() name_cache.build_name_cache()
# load all ids from xem # load all ids from xem
sickgear.classes.loading_msg.message = 'Loading xem data' # sickgear.classes.loading_msg.message = 'Loading xem data'
startup_background_tasks = threading.Thread(name='XEMUPDATER', target=sickgear.scene_exceptions.get_xem_ids) # startup_background_tasks = threading.Thread(name='XEMUPDATER', target=sickgear.scene_exceptions.ReleaseMap().fetch_xem_ids)
startup_background_tasks.start() # startup_background_tasks.start()
sickgear.classes.loading_msg.message = 'Checking history' sickgear.classes.loading_msg.message = 'Checking history'
# check history snatched_proper update # check history snatched_proper update
@ -624,7 +624,7 @@ class SickGear(object):
if not switching and (self.force_update or sickgear.UPDATE_SHOWS_ON_START): if not switching and (self.force_update or sickgear.UPDATE_SHOWS_ON_START):
sickgear.classes.loading_msg.message = 'Starting a forced show update' sickgear.classes.loading_msg.message = 'Starting a forced show update'
background_start_forced_show_update = threading.Thread(name='STARTUP-FORCE-SHOW-UPDATE', background_start_forced_show_update = threading.Thread(name='STARTUP-FORCE-SHOW-UPDATE',
target=sickgear.show_update_scheduler.action.run) target=sickgear.update_show_scheduler.action.run)
background_start_forced_show_update.start() background_start_forced_show_update.start()
sickgear.classes.loading_msg.message = 'Switching to default web server' sickgear.classes.loading_msg.message = 'Switching to default web server'

View file

@ -37,7 +37,7 @@ import zlib
from . import classes, db, helpers, image_cache, indexermapper, logger, metadata, naming, people_queue, providers, \ from . import classes, db, helpers, image_cache, indexermapper, logger, metadata, naming, people_queue, providers, \
scene_exceptions, scene_numbering, scheduler, search_backlog, search_propers, search_queue, search_recent, \ scene_exceptions, scene_numbering, scheduler, search_backlog, search_propers, search_queue, search_recent, \
show_queue, show_updater, subtitles, trakt_helpers, version_checker, watchedstate_queue show_queue, show_updater, subtitles, trakt_helpers, version_checker, watchedstate_queue
from . import auto_post_processer, properFinder # must come after the above imports from . import auto_media_process, properFinder # must come after the above imports
from .common import SD, SKIPPED, USER_AGENT from .common import SD, SKIPPED, USER_AGENT
from .config import check_section, check_setting_int, check_setting_str, ConfigMigrator, minimax from .config import check_section, check_setting_int, check_setting_str, ConfigMigrator, minimax
from .databases import cache_db, failed_db, mainDB from .databases import cache_db, failed_db, mainDB
@ -61,7 +61,7 @@ import sg_helpers
# noinspection PyUnreachableCode # noinspection PyUnreachableCode
if False: if False:
from typing import AnyStr, Dict, List from typing import AnyStr, Dict, List, Optional
from adba import Connection from adba import Connection
from .event_queue import Events from .event_queue import Events
from .tv import TVShow from .tv import TVShow
@ -88,23 +88,25 @@ DATA_DIR = ''
# noinspection PyTypeChecker # noinspection PyTypeChecker
events = None # type: Events events = None # type: Events
recent_search_scheduler = None show_queue_scheduler = None # type: Optional[scheduler.Scheduler]
backlog_search_scheduler = None search_queue_scheduler = None # type: Optional[scheduler.Scheduler]
show_update_scheduler = None people_queue_scheduler = None # type: Optional[scheduler.Scheduler]
people_queue_scheduler = None watched_state_queue_scheduler = None # type: Optional[scheduler.Scheduler]
update_software_scheduler = None update_software_scheduler = None # type: Optional[scheduler.Scheduler]
update_packages_scheduler = None update_packages_scheduler = None # type: Optional[scheduler.Scheduler]
show_queue_scheduler = None update_show_scheduler = None # type: Optional[scheduler.Scheduler]
search_queue_scheduler = None update_release_mappings_scheduler = None # type: Optional[scheduler.Scheduler]
proper_finder_scheduler = None search_recent_scheduler = None # type: Optional[scheduler.Scheduler]
media_process_scheduler = None search_backlog_scheduler = None # type: Optional[search_backlog.BacklogSearchScheduler]
subtitles_finder_scheduler = None search_propers_scheduler = None # type: Optional[scheduler.Scheduler]
# trakt_checker_scheduler = None search_subtitles_scheduler = None # type: Optional[scheduler.Scheduler]
emby_watched_state_scheduler = None emby_watched_state_scheduler = None # type: Optional[scheduler.Scheduler]
plex_watched_state_scheduler = None plex_watched_state_scheduler = None # type: Optional[scheduler.Scheduler]
watched_state_queue_scheduler = None process_media_scheduler = None # type: Optional[scheduler.Scheduler]
# noinspection PyTypeChecker # noinspection PyTypeChecker
background_mapping_task = None # type: threading.Thread background_mapping_task = None # type: threading.Thread
# deprecated
# trakt_checker_scheduler = None
provider_ping_thread_pool = {} provider_ping_thread_pool = {}
@ -624,9 +626,11 @@ __INITIALIZED__ = False
__INIT_STAGE__ = 0 __INIT_STAGE__ = 0
# don't reassign MEMCACHE var without reassigning sg_helpers.MEMCACHE # don't reassign MEMCACHE var without reassigning sg_helpers.MEMCACHE
# and scene_exceptions.MEMCACHE
# as long as the pointer is the same (dict only modified) all is fine # as long as the pointer is the same (dict only modified) all is fine
MEMCACHE = {} MEMCACHE = {}
sg_helpers.MEMCACHE = MEMCACHE sg_helpers.MEMCACHE = MEMCACHE
scene_exceptions.MEMCACHE = MEMCACHE
MEMCACHE_FLAG_IMAGES = {} MEMCACHE_FLAG_IMAGES = {}
@ -1518,11 +1522,14 @@ def init_stage_2():
global __INITIALIZED__, MEMCACHE, MEMCACHE_FLAG_IMAGES, RECENTSEARCH_STARTUP global __INITIALIZED__, MEMCACHE, MEMCACHE_FLAG_IMAGES, RECENTSEARCH_STARTUP
# Schedulers # Schedulers
# global trakt_checker_scheduler # global trakt_checker_scheduler
global recent_search_scheduler, backlog_search_scheduler, people_queue_scheduler, show_update_scheduler, \ global update_software_scheduler, update_packages_scheduler, \
update_software_scheduler, update_packages_scheduler, show_queue_scheduler, search_queue_scheduler, \ update_show_scheduler, update_release_mappings_scheduler, \
proper_finder_scheduler, media_process_scheduler, subtitles_finder_scheduler, \ search_backlog_scheduler, search_propers_scheduler, \
background_mapping_task, \ search_recent_scheduler, search_subtitles_scheduler, \
watched_state_queue_scheduler, emby_watched_state_scheduler, plex_watched_state_scheduler search_queue_scheduler, show_queue_scheduler, people_queue_scheduler, \
watched_state_queue_scheduler, emby_watched_state_scheduler, plex_watched_state_scheduler, \
process_media_scheduler, background_mapping_task
# Gen Config/Misc # Gen Config/Misc
global SHOW_UPDATE_HOUR, UPDATE_INTERVAL, UPDATE_PACKAGES_INTERVAL global SHOW_UPDATE_HOUR, UPDATE_INTERVAL, UPDATE_PACKAGES_INTERVAL
# Search Settings/Episode # Search Settings/Episode
@ -1570,32 +1577,17 @@ def init_stage_2():
metadata_provider_dict[tmp_provider.name] = tmp_provider metadata_provider_dict[tmp_provider.name] = tmp_provider
# initialize schedulers # initialize schedulers
# updaters # /
update_now = datetime.timedelta(minutes=0) # queues must be first
update_software_scheduler = scheduler.Scheduler(
version_checker.SoftwareUpdater(),
cycle_time=datetime.timedelta(hours=UPDATE_INTERVAL),
thread_name='SOFTWAREUPDATER',
silent=False)
update_packages_scheduler = scheduler.Scheduler(
version_checker.PackagesUpdater(),
cycle_time=datetime.timedelta(hours=UPDATE_PACKAGES_INTERVAL),
# run_delay=datetime.timedelta(minutes=2),
thread_name='PACKAGESUPDATER',
silent=False)
show_queue_scheduler = scheduler.Scheduler( show_queue_scheduler = scheduler.Scheduler(
show_queue.ShowQueue(), show_queue.ShowQueue(),
cycle_time=datetime.timedelta(seconds=3), cycle_time=datetime.timedelta(seconds=3),
thread_name='SHOWQUEUE') thread_name='SHOWQUEUE')
show_update_scheduler = scheduler.Scheduler( search_queue_scheduler = scheduler.Scheduler(
show_updater.ShowUpdater(), search_queue.SearchQueue(),
cycle_time=datetime.timedelta(hours=1), cycle_time=datetime.timedelta(seconds=3),
start_time=datetime.time(hour=SHOW_UPDATE_HOUR), thread_name='SEARCHQUEUE')
thread_name='SHOWUPDATER',
prevent_cycle_run=show_queue_scheduler.action.is_show_update_running) # 3AM
people_queue_scheduler = scheduler.Scheduler( people_queue_scheduler = scheduler.Scheduler(
people_queue.PeopleQueue(), people_queue.PeopleQueue(),
@ -1603,21 +1595,52 @@ def init_stage_2():
thread_name='PEOPLEQUEUE' thread_name='PEOPLEQUEUE'
) )
# searchers watched_state_queue_scheduler = scheduler.Scheduler(
search_queue_scheduler = scheduler.Scheduler( watchedstate_queue.WatchedStateQueue(),
search_queue.SearchQueue(),
cycle_time=datetime.timedelta(seconds=3), cycle_time=datetime.timedelta(seconds=3),
thread_name='SEARCHQUEUE') thread_name='WATCHEDSTATEQUEUE')
# /
# updaters
update_software_scheduler = scheduler.Scheduler(
version_checker.SoftwareUpdater(),
cycle_time=datetime.timedelta(hours=UPDATE_INTERVAL),
thread_name='SOFTWAREUPDATE',
silent=False)
update_packages_scheduler = scheduler.Scheduler(
version_checker.PackagesUpdater(),
cycle_time=datetime.timedelta(hours=UPDATE_PACKAGES_INTERVAL),
# run_delay=datetime.timedelta(minutes=2),
thread_name='PACKAGESUPDATE',
silent=False)
update_show_scheduler = scheduler.Scheduler(
show_updater.ShowUpdater(),
cycle_time=datetime.timedelta(hours=1),
start_time=datetime.time(hour=SHOW_UPDATE_HOUR),
thread_name='SHOWDATAUPDATE',
prevent_cycle_run=show_queue_scheduler.action.is_show_update_running) # 3AM
classes.loading_msg.message = 'Loading show maps'
update_release_mappings_scheduler = scheduler.Scheduler(
scene_exceptions.ReleaseMap(),
cycle_time=datetime.timedelta(hours=2),
thread_name='SHOWMAPSUPDATE',
silent=False)
# /
# searchers
init_search_delay = int(os.environ.get('INIT_SEARCH_DELAY', 0)) init_search_delay = int(os.environ.get('INIT_SEARCH_DELAY', 0))
# enter 4499 (was 4489) for experimental internal provider intervals # enter 4499 (was 4489) for experimental internal provider intervals
update_interval = datetime.timedelta(minutes=(RECENTSEARCH_INTERVAL, 1)[4499 == RECENTSEARCH_INTERVAL]) update_interval = datetime.timedelta(minutes=(RECENTSEARCH_INTERVAL, 1)[4499 == RECENTSEARCH_INTERVAL])
recent_search_scheduler = scheduler.Scheduler( update_now = datetime.timedelta(minutes=0)
search_recent_scheduler = scheduler.Scheduler(
search_recent.RecentSearcher(), search_recent.RecentSearcher(),
cycle_time=update_interval, cycle_time=update_interval,
run_delay=update_now if RECENTSEARCH_STARTUP else datetime.timedelta(minutes=init_search_delay or 5), run_delay=update_now if RECENTSEARCH_STARTUP else datetime.timedelta(minutes=init_search_delay or 5),
thread_name='RECENTSEARCHER', thread_name='RECENTSEARCH',
prevent_cycle_run=search_queue_scheduler.action.is_recentsearch_in_progress) prevent_cycle_run=search_queue_scheduler.action.is_recentsearch_in_progress)
if [x for x in providers.sorted_sources() if [x for x in providers.sorted_sources()
@ -1635,14 +1658,13 @@ def init_stage_2():
backlogdelay = helpers.try_int((time_diff.total_seconds() / 60) + 10, 10) backlogdelay = helpers.try_int((time_diff.total_seconds() / 60) + 10, 10)
else: else:
backlogdelay = 10 backlogdelay = 10
backlog_search_scheduler = search_backlog.BacklogSearchScheduler( search_backlog_scheduler = search_backlog.BacklogSearchScheduler(
search_backlog.BacklogSearcher(), search_backlog.BacklogSearcher(),
cycle_time=datetime.timedelta(minutes=get_backlog_cycle_time()), cycle_time=datetime.timedelta(minutes=get_backlog_cycle_time()),
run_delay=datetime.timedelta(minutes=init_search_delay or backlogdelay), run_delay=datetime.timedelta(minutes=init_search_delay or backlogdelay),
thread_name='BACKLOG', thread_name='BACKLOGSEARCH',
prevent_cycle_run=search_queue_scheduler.action.is_standard_backlog_in_progress) prevent_cycle_run=search_queue_scheduler.action.is_standard_backlog_in_progress)
propers_searcher = search_propers.ProperSearcher()
last_proper_search = datetime.datetime.fromtimestamp(properFinder.get_last_proper_search()) last_proper_search = datetime.datetime.fromtimestamp(properFinder.get_last_proper_search())
time_diff = datetime.timedelta(days=1) - (datetime.datetime.now() - last_proper_search) time_diff = datetime.timedelta(days=1) - (datetime.datetime.now() - last_proper_search)
if time_diff < datetime.timedelta(seconds=0): if time_diff < datetime.timedelta(seconds=0):
@ -1650,34 +1672,21 @@ def init_stage_2():
else: else:
properdelay = helpers.try_int((time_diff.total_seconds() / 60) + 5, 20) properdelay = helpers.try_int((time_diff.total_seconds() / 60) + 5, 20)
proper_finder_scheduler = scheduler.Scheduler( search_propers_scheduler = scheduler.Scheduler(
propers_searcher, search_propers.ProperSearcher(),
cycle_time=datetime.timedelta(days=1), cycle_time=datetime.timedelta(days=1),
run_delay=datetime.timedelta(minutes=init_search_delay or properdelay), run_delay=datetime.timedelta(minutes=init_search_delay or properdelay),
thread_name='FINDPROPERS', thread_name='PROPERSSEARCH',
prevent_cycle_run=search_queue_scheduler.action.is_propersearch_in_progress) prevent_cycle_run=search_queue_scheduler.action.is_propersearch_in_progress)
# processors search_subtitles_scheduler = scheduler.Scheduler(
media_process_scheduler = scheduler.Scheduler(
auto_post_processer.PostProcesser(),
cycle_time=datetime.timedelta(minutes=MEDIAPROCESS_INTERVAL),
thread_name='POSTPROCESSER',
silent=not PROCESS_AUTOMATICALLY)
subtitles_finder_scheduler = scheduler.Scheduler(
subtitles.SubtitlesFinder(), subtitles.SubtitlesFinder(),
cycle_time=datetime.timedelta(hours=SUBTITLES_FINDER_INTERVAL), cycle_time=datetime.timedelta(hours=SUBTITLES_FINDER_INTERVAL),
thread_name='FINDSUBTITLES', thread_name='SUBTITLESEARCH',
silent=not USE_SUBTITLES) silent=not USE_SUBTITLES)
background_mapping_task = threading.Thread(name='MAPPINGSUPDATER', target=indexermapper.load_mapped_ids, # /
kwargs={'load_all': True}) # others
watched_state_queue_scheduler = scheduler.Scheduler(
watchedstate_queue.WatchedStateQueue(),
cycle_time=datetime.timedelta(seconds=3),
thread_name='WATCHEDSTATEQUEUE')
emby_watched_state_scheduler = scheduler.Scheduler( emby_watched_state_scheduler = scheduler.Scheduler(
EmbyWatchedStateUpdater(), EmbyWatchedStateUpdater(),
cycle_time=datetime.timedelta(minutes=EMBY_WATCHEDSTATE_INTERVAL), cycle_time=datetime.timedelta(minutes=EMBY_WATCHEDSTATE_INTERVAL),
@ -1690,6 +1699,15 @@ def init_stage_2():
run_delay=datetime.timedelta(minutes=5), run_delay=datetime.timedelta(minutes=5),
thread_name='PLEXWATCHEDSTATE') thread_name='PLEXWATCHEDSTATE')
process_media_scheduler = scheduler.Scheduler(
auto_media_process.MediaProcess(),
cycle_time=datetime.timedelta(minutes=MEDIAPROCESS_INTERVAL),
thread_name='PROCESSMEDIA',
silent=not PROCESS_AUTOMATICALLY)
background_mapping_task = threading.Thread(name='MAPPINGUPDATES', target=indexermapper.load_mapped_ids,
kwargs={'load_all': True})
MEMCACHE['history_tab_limit'] = 11 MEMCACHE['history_tab_limit'] = 11
MEMCACHE['history_tab'] = History.menu_tab(MEMCACHE['history_tab_limit']) MEMCACHE['history_tab'] = History.menu_tab(MEMCACHE['history_tab_limit'])
@ -1707,11 +1725,15 @@ def init_stage_2():
def enabled_schedulers(is_init=False): def enabled_schedulers(is_init=False):
# ([], [trakt_checker_scheduler])[USE_TRAKT] + \ # ([], [trakt_checker_scheduler])[USE_TRAKT] + \
return ([], [events])[is_init] \ return ([], [events])[is_init] \
+ ([], [recent_search_scheduler, backlog_search_scheduler, show_update_scheduler, people_queue_scheduler, + ([], [update_software_scheduler, update_packages_scheduler,
update_software_scheduler, update_packages_scheduler, update_show_scheduler, update_release_mappings_scheduler,
show_queue_scheduler, search_queue_scheduler, proper_finder_scheduler, search_recent_scheduler, search_backlog_scheduler,
media_process_scheduler, subtitles_finder_scheduler, search_propers_scheduler, search_subtitles_scheduler,
emby_watched_state_scheduler, plex_watched_state_scheduler, watched_state_queue_scheduler] show_queue_scheduler, search_queue_scheduler,
people_queue_scheduler, watched_state_queue_scheduler,
emby_watched_state_scheduler, plex_watched_state_scheduler,
process_media_scheduler
]
)[not MEMCACHE.get('update_restart')] \ )[not MEMCACHE.get('update_restart')] \
+ ([events], [])[is_init] + ([events], [])[is_init]

View file

@ -18,32 +18,31 @@ import os.path
import sickgear import sickgear
from . import logger, processTV from . import logger, processTV
from .scheduler import Job
class PostProcesser(object): class MediaProcess(Job):
def __init__(self): def __init__(self):
self.amActive = False super(MediaProcess, self).__init__(self.job_run, kwargs={})
@staticmethod @staticmethod
def is_enabled(): def is_enabled():
return sickgear.PROCESS_AUTOMATICALLY return sickgear.PROCESS_AUTOMATICALLY
def run(self): def job_run(self):
if self.is_enabled(): if self.is_enabled():
self.amActive = True
self._main() self._main()
self.amActive = False
@staticmethod @staticmethod
def _main(): def _main():
if not os.path.isdir(sickgear.TV_DOWNLOAD_DIR): if not os.path.isdir(sickgear.TV_DOWNLOAD_DIR):
logger.error('Automatic post-processing attempted but dir %s doesn\'t exist' % sickgear.TV_DOWNLOAD_DIR) logger.error('Automatic media processing attempted but dir %s doesn\'t exist' % sickgear.TV_DOWNLOAD_DIR)
return return
if not os.path.isabs(sickgear.TV_DOWNLOAD_DIR): if not os.path.isabs(sickgear.TV_DOWNLOAD_DIR):
logger.error('Automatic post-processing attempted but dir %s is relative ' logger.error('Automatic media processing attempted but dir %s is relative '
'(and probably not what you really want to process)' % sickgear.TV_DOWNLOAD_DIR) '(and probably not what you really want to process)' % sickgear.TV_DOWNLOAD_DIR)
return return
processTV.processDir(sickgear.TV_DOWNLOAD_DIR, is_basedir=True) processTV.process_dir(sickgear.TV_DOWNLOAD_DIR, is_basedir=True)

View file

@ -152,8 +152,8 @@ def schedule_mediaprocess(iv):
if sickgear.MEDIAPROCESS_INTERVAL < sickgear.MIN_MEDIAPROCESS_INTERVAL: if sickgear.MEDIAPROCESS_INTERVAL < sickgear.MIN_MEDIAPROCESS_INTERVAL:
sickgear.MEDIAPROCESS_INTERVAL = sickgear.MIN_MEDIAPROCESS_INTERVAL sickgear.MEDIAPROCESS_INTERVAL = sickgear.MIN_MEDIAPROCESS_INTERVAL
sickgear.media_process_scheduler.cycle_time = datetime.timedelta(minutes=sickgear.MEDIAPROCESS_INTERVAL) sickgear.process_media_scheduler.cycle_time = datetime.timedelta(minutes=sickgear.MEDIAPROCESS_INTERVAL)
sickgear.media_process_scheduler.set_paused_state() sickgear.process_media_scheduler.set_paused_state()
def schedule_recentsearch(iv): def schedule_recentsearch(iv):
@ -162,14 +162,14 @@ def schedule_recentsearch(iv):
if sickgear.RECENTSEARCH_INTERVAL < sickgear.MIN_RECENTSEARCH_INTERVAL: if sickgear.RECENTSEARCH_INTERVAL < sickgear.MIN_RECENTSEARCH_INTERVAL:
sickgear.RECENTSEARCH_INTERVAL = sickgear.MIN_RECENTSEARCH_INTERVAL sickgear.RECENTSEARCH_INTERVAL = sickgear.MIN_RECENTSEARCH_INTERVAL
sickgear.recent_search_scheduler.cycle_time = datetime.timedelta(minutes=sickgear.RECENTSEARCH_INTERVAL) sickgear.search_recent_scheduler.cycle_time = datetime.timedelta(minutes=sickgear.RECENTSEARCH_INTERVAL)
def schedule_backlog(iv): def schedule_backlog(iv):
sickgear.BACKLOG_PERIOD = minimax(iv, sickgear.DEFAULT_BACKLOG_PERIOD, sickgear.BACKLOG_PERIOD = minimax(iv, sickgear.DEFAULT_BACKLOG_PERIOD,
sickgear.MIN_BACKLOG_PERIOD, sickgear.MAX_BACKLOG_PERIOD) sickgear.MIN_BACKLOG_PERIOD, sickgear.MAX_BACKLOG_PERIOD)
sickgear.backlog_search_scheduler.action.cycle_time = sickgear.BACKLOG_PERIOD sickgear.search_backlog_scheduler.action.cycle_time = sickgear.BACKLOG_PERIOD
def schedule_update_software(iv): def schedule_update_software(iv):
@ -220,7 +220,7 @@ def schedule_update_packages_notify(update_packages_notify):
def schedule_download_propers(download_propers): def schedule_download_propers(download_propers):
if sickgear.DOWNLOAD_PROPERS != download_propers: if sickgear.DOWNLOAD_PROPERS != download_propers:
sickgear.DOWNLOAD_PROPERS = download_propers sickgear.DOWNLOAD_PROPERS = download_propers
sickgear.proper_finder_scheduler.set_paused_state() sickgear.search_propers_scheduler.set_paused_state()
def schedule_trakt(use_trakt): def schedule_trakt(use_trakt):
@ -233,7 +233,7 @@ def schedule_trakt(use_trakt):
def schedule_subtitles(use_subtitles): def schedule_subtitles(use_subtitles):
if sickgear.USE_SUBTITLES != use_subtitles: if sickgear.USE_SUBTITLES != use_subtitles:
sickgear.USE_SUBTITLES = use_subtitles sickgear.USE_SUBTITLES = use_subtitles
sickgear.subtitles_finder_scheduler.set_paused_state() sickgear.search_subtitles_scheduler.set_paused_state()
def schedule_emby_watched(emby_watched_interval): def schedule_emby_watched(emby_watched_interval):

View file

@ -1,5 +1,4 @@
from lib.six import moves import queue
import threading import threading
@ -15,7 +14,7 @@ class Event(object):
class Events(threading.Thread): class Events(threading.Thread):
def __init__(self, callback): def __init__(self, callback):
super(Events, self).__init__() super(Events, self).__init__()
self.queue = moves.queue.Queue() self.queue = queue.Queue()
self.daemon = True self.daemon = True
self.callback = callback self.callback = callback
self.name = 'EVENT-QUEUE' self.name = 'EVENT-QUEUE'
@ -31,24 +30,24 @@ class Events(threading.Thread):
while not self._stopper.is_set(): while not self._stopper.is_set():
try: try:
# get event type # get event type
etype = self.queue.get(True, 1) ev_type = self.queue.get(True, 1)
except moves.queue.Empty: except queue.Empty:
etype = 'Empty' ev_type = 'Empty'
except(BaseException, Exception): except(BaseException, Exception):
etype = None ev_type = None
if etype in (self.SystemEvent.RESTART, self.SystemEvent.SHUTDOWN, None, 'Empty'): if ev_type in (self.SystemEvent.RESTART, self.SystemEvent.SHUTDOWN, None, 'Empty'):
if etype in ('Empty',): if ev_type in ('Empty',):
continue continue
from sickgear import logger from sickgear import logger
logger.debug(f'Callback {self.callback.__name__}(event type:{etype})') logger.debug(f'Callback {self.callback.__name__}(event type:{ev_type})')
try: try:
# perform callback if we got an event type # perform callback if we got an event type
self.callback(etype) self.callback(ev_type)
# event completed # event completed
self.queue.task_done() self.queue.task_done()
except moves.queue.Empty: except queue.Empty:
pass pass
# exiting thread # exiting thread

View file

@ -19,6 +19,7 @@ import datetime
import threading import threading
from . import db, logger from . import db, logger
from .scheduler import Job
from exceptions_helper import ex from exceptions_helper import ex
from six import integer_types from six import integer_types
@ -37,9 +38,10 @@ class QueuePriorities(object):
VERYHIGH = 40 VERYHIGH = 40
class GenericQueue(object): class GenericQueue(Job):
def __init__(self, cache_db_tables=None, main_db_tables=None): def __init__(self, cache_db_tables=None, main_db_tables=None):
# type: (List[AnyStr], List[AnyStr]) -> None # type: (List[AnyStr], List[AnyStr]) -> None
super(GenericQueue, self).__init__(self.job_run, silent=True, kwargs={}, reentrant_lock=True)
self.currentItem = None # type: QueueItem or None self.currentItem = None # type: QueueItem or None
@ -51,13 +53,41 @@ class GenericQueue(object):
self.events = {} # type: Dict[int, List[Callable]] self.events = {} # type: Dict[int, List[Callable]]
self.lock = threading.RLock()
self.cache_db_tables = cache_db_tables or [] # type: List[AnyStr] self.cache_db_tables = cache_db_tables or [] # type: List[AnyStr]
self.main_db_tables = main_db_tables or [] # type: List[AnyStr] self.main_db_tables = main_db_tables or [] # type: List[AnyStr]
self._id_counter = self._load_init_id() # type: integer_types self._id_counter = self._load_init_id() # type: integer_types
def job_run(self):
# only start a new task if one isn't already going
with self.lock:
if None is self.currentItem or not self.currentItem.is_alive():
# if the thread is dead then the current item should be finished
if self.currentItem:
self.currentItem.finish()
try:
self.delete_item(self.currentItem, finished_run=True)
except (BaseException, Exception):
pass
self.currentItem = None
# if there's something in the queue then run it in a thread and take it out of the queue
if 0 < len(self.queue):
self.queue.sort(key=lambda y: (-y.priority, y.added))
if self.queue[0].priority < self.min_priority:
return
# launch the queue item in a thread
self.currentItem = self.queue.pop(0)
if 'SEARCHQUEUE' != self.queue_name:
self.currentItem.name = self.queue_name + '-' + self.currentItem.name
self.currentItem.start()
self.check_events()
def _load_init_id(self): def _load_init_id(self):
# type: (...) -> integer_types # type: (...) -> integer_types
""" """
@ -216,7 +246,7 @@ class GenericQueue(object):
self.min_priority = 999999999999 self.min_priority = 999999999999
def unpause(self): def unpause(self):
logger.log('Unpausing queue') logger.log('Un-pausing queue')
with self.lock: with self.lock:
self.min_priority = 0 self.min_priority = 0
@ -269,36 +299,6 @@ class GenericQueue(object):
except (BaseException, Exception) as e: except (BaseException, Exception) as e:
logger.error('Error executing Event: %s' % ex(e)) logger.error('Error executing Event: %s' % ex(e))
def run(self):
# only start a new task if one isn't already going
with self.lock:
if None is self.currentItem or not self.currentItem.is_alive():
# if the thread is dead then the current item should be finished
if self.currentItem:
self.currentItem.finish()
try:
self.delete_item(self.currentItem, finished_run=True)
except (BaseException, Exception):
pass
self.currentItem = None
# if there's something in the queue then run it in a thread and take it out of the queue
if 0 < len(self.queue):
self.queue.sort(key=lambda y: (-y.priority, y.added))
if self.queue[0].priority < self.min_priority:
return
# launch the queue item in a thread
self.currentItem = self.queue.pop(0)
if 'SEARCHQUEUE' != self.queue_name:
self.currentItem.name = self.queue_name + '-' + self.currentItem.name
self.currentItem.start()
self.check_events()
class QueueItem(threading.Thread): class QueueItem(threading.Thread):
def __init__(self, name, action_id=0, uid=None): def __init__(self, name, action_id=0, uid=None):

View file

@ -14,6 +14,7 @@
# You should have received a copy of the GNU General Public License # You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>. # along with SickGear. If not, see <http://www.gnu.org/licenses/>.
from collections import defaultdict
import threading import threading
import sickgear import sickgear
@ -21,6 +22,7 @@ from . import db
from .helpers import full_sanitize_scene_name, try_int from .helpers import full_sanitize_scene_name, try_int
from six import iteritems from six import iteritems
from _23 import map_consume
# noinspection PyUnreachableCode # noinspection PyUnreachableCode
if False: if False:
@ -85,18 +87,19 @@ def build_name_cache(show_obj=None, update_only_scene=False):
if show_obj: if show_obj:
# search for only the requested show id and flush old show entries from namecache # search for only the requested show id and flush old show entries from namecache
show_ids = {show_obj.tvid: [show_obj.prodid]} show_ids = {show_obj.tvid: [show_obj.prodid]}
nameCache = dict([(k, v) for k, v in iteritems(nameCache) nameCache = dict([(k, v) for k, v in iteritems(nameCache)
if not (v[0] == show_obj.tvid and v[1] == show_obj.prodid)]) if not (v[0] == show_obj.tvid and v[1] == show_obj.prodid)])
sceneNameCache = dict([(k, v) for k, v in iteritems(sceneNameCache) sceneNameCache = dict([(k, v) for k, v in iteritems(sceneNameCache)
if not (v[0] == show_obj.tvid and v[1] == show_obj.prodid)]) if not (v[0] == show_obj.tvid and v[1] == show_obj.prodid)])
# add standard indexer name to namecache # add standard indexer name to namecache
nameCache[full_sanitize_scene_name(show_obj.unique_name or show_obj.name)] = [show_obj.tvid, show_obj.prodid, -1] nameCache[full_sanitize_scene_name(show_obj.unique_name or show_obj.name)] = \
[show_obj.tvid, show_obj.prodid, -1]
else: else:
# generate list of production ids to look up in cache.db # generate list of production ids to look up in cache.db
show_ids = {} show_ids = defaultdict(list)
for cur_show_obj in sickgear.showList: map_consume(lambda _so: show_ids[_so.tvid].append(_so.prodid), sickgear.showList)
show_ids.setdefault(cur_show_obj.tvid, []).append(cur_show_obj.prodid)
# add all standard show indexer names to namecache # add all standard show indexer names to namecache
nameCache = dict( nameCache = dict(
@ -104,33 +107,32 @@ def build_name_cache(show_obj=None, update_only_scene=False):
for cur_so in sickgear.showList if cur_so]) for cur_so in sickgear.showList if cur_so])
sceneNameCache = {} sceneNameCache = {}
cache_db = db.DBConnection()
cache_results = []
if update_only_scene:
# generate list of production ids to look up in cache.db
show_ids = {}
for cur_show_obj in sickgear.showList:
show_ids.setdefault(cur_show_obj.tvid, []).append(cur_show_obj.prodid)
tmp_scene_name_cache = {}
else:
tmp_scene_name_cache = sceneNameCache.copy() tmp_scene_name_cache = sceneNameCache.copy()
for t, s in iteritems(show_ids): else:
# generate list of production ids to look up in cache.db
show_ids = defaultdict(list)
map_consume(lambda _so: show_ids[_so.tvid].append(_so.prodid), sickgear.showList)
tmp_scene_name_cache = {}
cache_results = []
cache_db = db.DBConnection()
for cur_tvid, cur_prodid_list in iteritems(show_ids):
cache_results += cache_db.select( cache_results += cache_db.select(
'SELECT show_name, indexer AS tv_id, indexer_id AS prod_id, season' f'SELECT show_name, indexer AS tv_id, indexer_id AS prod_id, season'
' FROM scene_exceptions' f' FROM scene_exceptions'
' WHERE indexer = %s AND indexer_id IN (%s)' % (t, ','.join(['%s' % i for i in s]))) f' WHERE indexer = {cur_tvid} AND indexer_id IN ({",".join(map(str, cur_prodid_list))})')
if cache_results: if cache_results:
for cache_result in cache_results: for cur_result in cache_results:
tvid = int(cache_result['tv_id']) tvid = int(cur_result['tv_id'])
prodid = int(cache_result['prod_id']) prodid = int(cur_result['prod_id'])
season = try_int(cache_result['season'], -1) season = try_int(cur_result['season'], -1)
name = full_sanitize_scene_name(cache_result['show_name']) name = full_sanitize_scene_name(cur_result['show_name'])
tmp_scene_name_cache[name] = [tvid, prodid, season] tmp_scene_name_cache[name] = [tvid, prodid, season]
sceneNameCache = tmp_scene_name_cache sceneNameCache = tmp_scene_name_cache.copy()
def remove_from_namecache(tvid, prodid): def remove_from_namecache(tvid, prodid):

View file

@ -407,7 +407,8 @@ class NameParser(object):
new_season_numbers.append(s) new_season_numbers.append(s)
elif show_obj.is_anime and len(best_result.ab_episode_numbers) and not self.testing: elif show_obj.is_anime and len(best_result.ab_episode_numbers) and not self.testing:
scene_season = scene_exceptions.get_scene_exception_by_name(best_result.series_name)[2] scene_season = scene_exceptions.get_scene_exception_by_name(
best_result.series_name)[2]
for epAbsNo in best_result.ab_episode_numbers: for epAbsNo in best_result.ab_episode_numbers:
a = epAbsNo a = epAbsNo

View file

@ -1141,10 +1141,9 @@ class ProcessTVShow(object):
self._buffer(processor.log.strip('\n')) self._buffer(processor.log.strip('\n'))
# backward compatibility prevents the case of this function name from being updated to PEP8 def process_dir(dir_name, nzb_name=None, process_method=None, force=False, force_replace=None,
def processDir(dir_name, nzb_name=None, process_method=None, force=False, force_replace=None, failed=False, pp_type='auto', cleanup=False, webhandler=None, show_obj=None, is_basedir=True,
failed=False, pp_type='auto', cleanup=False, webhandler=None, show_obj=None, is_basedir=True, skip_failure_processing=False, client=None):
skip_failure_processing=False, client=None):
""" """
:param dir_name: dir name :param dir_name: dir name
@ -1182,6 +1181,10 @@ def processDir(dir_name, nzb_name=None, process_method=None, force=False, force_
pp_type, cleanup, show_obj) pp_type, cleanup, show_obj)
# backward compatibility
processDir = process_dir
def process_minimal(nzb_name, show_obj, failed, webhandler): def process_minimal(nzb_name, show_obj, failed, webhandler):
# type: (AnyStr, TVShow, bool, Any) -> None # type: (AnyStr, TVShow, bool, Any) -> None
ProcessTVShow(webhandler).process_minimal(nzb_name, show_obj, failed, webhandler) ProcessTVShow(webhandler).process_minimal(nzb_name, show_obj, failed, webhandler)

View file

@ -71,7 +71,7 @@ def search_propers(provider_proper_obj=None):
if None is provider_proper_obj: if None is provider_proper_obj:
_set_last_proper_search(datetime.datetime.now()) _set_last_proper_search(datetime.datetime.now())
proper_sch = sickgear.proper_finder_scheduler proper_sch = sickgear.search_propers_scheduler
if None is proper_sch.start_time: if None is proper_sch.start_time:
run_in = proper_sch.last_run + proper_sch.cycle_time - datetime.datetime.now() run_in = proper_sch.last_run + proper_sch.cycle_time - datetime.datetime.now()
run_at = ', next check ' run_at = ', next check '
@ -696,7 +696,7 @@ def _set_last_proper_search(when):
def next_proper_timeleft(): def next_proper_timeleft():
return sickgear.proper_finder_scheduler.time_left() return sickgear.search_propers_scheduler.time_left()
def get_last_proper_search(): def get_last_proper_search():

View file

@ -37,7 +37,7 @@ from ..classes import NZBSearchResult, TorrentSearchResult, SearchResult
from ..common import Quality, MULTI_EP_RESULT, SEASON_RESULT, USER_AGENT from ..common import Quality, MULTI_EP_RESULT, SEASON_RESULT, USER_AGENT
from ..helpers import maybe_plural, remove_file_perm from ..helpers import maybe_plural, remove_file_perm
from ..name_parser.parser import InvalidNameException, InvalidShowException, NameParser from ..name_parser.parser import InvalidNameException, InvalidShowException, NameParser
from ..scene_exceptions import has_season_exceptions from ..scene_exceptions import ReleaseMap
from ..show_name_helpers import get_show_names_all_possible from ..show_name_helpers import get_show_names_all_possible
from ..sgdatetime import SGDatetime from ..sgdatetime import SGDatetime
from ..tv import TVEpisode, TVShow from ..tv import TVEpisode, TVShow
@ -1743,7 +1743,8 @@ class TorrentProvider(GenericProvider):
return [] return []
show_obj = ep_obj.show_obj show_obj = ep_obj.show_obj
season = (-1, ep_obj.season)[has_season_exceptions(ep_obj.show_obj.tvid, ep_obj.show_obj.prodid, ep_obj.season)] season = (-1, ep_obj.season)[ReleaseMap().has_season_exceptions(
ep_obj.show_obj.tvid, ep_obj.show_obj.prodid, ep_obj.season)]
ep_dict = self._ep_dict(ep_obj) ep_dict = self._ep_dict(ep_obj)
sp_detail = (show_obj.air_by_date or show_obj.is_sports) and str(ep_obj.airdate).split('-')[0] or \ sp_detail = (show_obj.air_by_date or show_obj.is_sports) and str(ep_obj.airdate).split('-')[0] or \
(show_obj.is_anime and ep_obj.scene_absolute_number or (show_obj.is_anime and ep_obj.scene_absolute_number or
@ -1779,7 +1780,8 @@ class TorrentProvider(GenericProvider):
return [] return []
show_obj = ep_obj.show_obj show_obj = ep_obj.show_obj
season = (-1, ep_obj.season)[has_season_exceptions(ep_obj.show_obj.tvid, ep_obj.show_obj.prodid, ep_obj.season)] season = (-1, ep_obj.season)[ReleaseMap().has_season_exceptions(
ep_obj.show_obj.tvid, ep_obj.show_obj.prodid, ep_obj.season)]
if show_obj.air_by_date or show_obj.is_sports: if show_obj.air_by_date or show_obj.is_sports:
ep_detail = [str(ep_obj.airdate).replace('-', sep_date)]\ ep_detail = [str(ep_obj.airdate).replace('-', sep_date)]\
if 'date_detail' not in kwargs else kwargs['date_detail'](ep_obj.airdate) if 'date_detail' not in kwargs else kwargs['date_detail'](ep_obj.airdate)

View file

@ -34,7 +34,7 @@ from ..network_timezones import SG_TIMEZONE
from ..sgdatetime import SGDatetime from ..sgdatetime import SGDatetime
from ..search import get_aired_in_season, get_wanted_qualities from ..search import get_aired_in_season, get_wanted_qualities
from ..show_name_helpers import get_show_names from ..show_name_helpers import get_show_names
from ..scene_exceptions import has_season_exceptions from ..scene_exceptions import ReleaseMap
from ..tv import TVEpisode, TVShow from ..tv import TVEpisode, TVShow
from lib.dateutil import parser from lib.dateutil import parser
@ -470,7 +470,7 @@ class NewznabProvider(generic.NZBProvider):
# id search # id search
params = base_params.copy() params = base_params.copy()
use_id = False use_id = False
if not has_season_exceptions(ep_obj.show_obj.tvid, ep_obj.show_obj.prodid, ep_obj.season): if not ReleaseMap().has_season_exceptions(ep_obj.show_obj.tvid, ep_obj.show_obj.prodid, ep_obj.season):
for i in sickgear.TVInfoAPI().all_sources: for i in sickgear.TVInfoAPI().all_sources:
if i in ep_obj.show_obj.ids and 0 < ep_obj.show_obj.ids[i]['id'] and i in self.caps: if i in ep_obj.show_obj.ids and 0 < ep_obj.show_obj.ids[i]['id'] and i in self.caps:
params[self.caps[i]] = ep_obj.show_obj.ids[i]['id'] params[self.caps[i]] = ep_obj.show_obj.ids[i]['id']
@ -528,7 +528,7 @@ class NewznabProvider(generic.NZBProvider):
# id search # id search
params = base_params.copy() params = base_params.copy()
use_id = False use_id = False
if not has_season_exceptions(ep_obj.show_obj.tvid, ep_obj.show_obj.prodid, ep_obj.season): if not ReleaseMap().has_season_exceptions(ep_obj.show_obj.tvid, ep_obj.show_obj.prodid, ep_obj.season):
for i in sickgear.TVInfoAPI().all_sources: for i in sickgear.TVInfoAPI().all_sources:
if i in ep_obj.show_obj.ids and 0 < ep_obj.show_obj.ids[i]['id'] and i in self.caps: if i in ep_obj.show_obj.ids and 0 < ep_obj.show_obj.ids[i]['id'] and i in self.caps:
params[self.caps[i]] = ep_obj.show_obj.ids[i]['id'] params[self.caps[i]] = ep_obj.show_obj.ids[i]['id']

File diff suppressed because it is too large Load diff

View file

@ -19,17 +19,14 @@
# @copyright: Dermot Buckley # @copyright: Dermot Buckley
# #
import datetime
import traceback import traceback
from sqlite3 import Row from sqlite3 import Row
from exceptions_helper import ex from exceptions_helper import ex
import sickgear import sickgear
from . import db, logger from . import db, logger
from .helpers import try_int from .helpers import try_int
from .scene_exceptions import xem_ids_list
from .sgdatetime import SGDatetime from .sgdatetime import SGDatetime
# noinspection PyUnreachableCode # noinspection PyUnreachableCode
@ -774,7 +771,8 @@ def xem_refresh(tvid, prodid, force=False):
tvid, prodid = int(tvid), int(prodid) tvid, prodid = int(tvid), int(prodid)
tvinfo = sickgear.TVInfoAPI(tvid) tvinfo = sickgear.TVInfoAPI(tvid)
if 'xem_origin' not in tvinfo.config or prodid not in xem_ids_list.get(tvid, []): if 'xem_origin' not in tvinfo.config \
or prodid not in sickgear.scene_exceptions.MEMCACHE['release_map_xem'].get(tvid, []):
return return
xem_origin = tvinfo.config['xem_origin'] xem_origin = tvinfo.config['xem_origin']

View file

@ -24,10 +24,12 @@ import traceback
from . import logger from . import logger
from exceptions_helper import ex from exceptions_helper import ex
import sickgear
class Scheduler(threading.Thread): class Scheduler(threading.Thread):
def __init__(self, action, cycle_time=datetime.timedelta(minutes=10), run_delay=datetime.timedelta(minutes=0), def __init__(self, action, cycle_time=datetime.timedelta(minutes=10), run_delay=datetime.timedelta(minutes=0),
start_time=None, thread_name="ScheduledThread", silent=True, prevent_cycle_run=None, paused=False): start_time=None, thread_name='ScheduledThread', silent=True, prevent_cycle_run=None, paused=False):
super(Scheduler, self).__init__() super(Scheduler, self).__init__()
self.last_run = datetime.datetime.now() + run_delay - cycle_time self.last_run = datetime.datetime.now() + run_delay - cycle_time
@ -41,10 +43,18 @@ class Scheduler(threading.Thread):
self._stopper = threading.Event() self._stopper = threading.Event()
self._unpause = threading.Event() self._unpause = threading.Event()
if not paused: if not paused:
self._unpause.set() self.unpause()
self.lock = threading.Lock() self.lock = threading.Lock()
self.force = False self.force = False
@property
def is_running_job(self):
# type: (...) -> bool
"""
Return running state of the scheduled action
"""
return self.action.amActive
def pause(self): def pause(self):
self._unpause.clear() self._unpause.clear()
@ -69,7 +79,7 @@ class Scheduler(threading.Thread):
return self.cycle_time - (datetime.datetime.now() - self.last_run) return self.cycle_time - (datetime.datetime.now() - self.last_run)
def force_run(self): def force_run(self):
if not self.action.amActive: if not self.is_running_job:
self.force = True self.force = True
return True return True
return False return False
@ -139,3 +149,72 @@ class Scheduler(threading.Thread):
# exiting thread # exiting thread
self._stopper.clear() self._stopper.clear()
self._unpause.clear() self._unpause.clear()
@staticmethod
def blocking_jobs():
# type (...) -> bool
"""
Return description of running jobs, or False if none are running
These jobs should prevent a restart/shutdown while running.
"""
job_report = []
if sickgear.process_media_scheduler.is_running_job:
job_report.append('Media processing')
if sickgear.update_show_scheduler.is_running_job:
job_report.append(f'{("U", "u")[len(job_report)]}pdating shows data')
# this idea is not ready for production. issues identified...
# 1. many are just the queue filling process, so this doesn't actually prevent restart/shutdown during those
# 2. in case something goes wrong or there is a bad bug in the code, the restart would be prevented
# (meaning no auto- / manual update via ui, user is forced to kill the process, manually update and restart)
# 3. just by bad timing the autoupdate process may be running at the same time as another blocking thread = updates but
# never restarts
# therefore, with these issues, the next two lines cannot allow this feature to be brought into service :(
# if len(job_report):
# return '%s %s running' % (' and '.join(job_report), ('are', 'is')[1 == len(job_report)])
return False
class Job(object):
"""
The job class centralises tasks with states
"""
def __init__(self, func, silent=False, thread_lock=False, reentrant_lock=False, args=(), kwargs=None):
self.amActive = False
self._func = func
self._silent = silent
self._args = args
self._kwargs = (kwargs, {})[None is kwargs]
if thread_lock:
self.lock = threading.Lock()
elif reentrant_lock:
self.lock = threading.RLock()
def run(self):
if self.amActive and self.__class__.__name__ in ('BacklogSearcher', 'MediaProcess'):
logger.log(u'%s is still running, not starting it again' % self.__class__.__name__)
return
if self._func:
result, re_raise = None, False
try:
self.amActive = True
result = self._func(*self._args, **self._kwargs)
except(BaseException, Exception) as e:
re_raise = e
finally:
self.amActive = False
not self._silent and logger.log(u'%s(%s) completed' % (self.__class__.__name__, self._func.__name__))
if re_raise:
raise re_raise
return result

View file

@ -17,13 +17,13 @@
from __future__ import with_statement, division from __future__ import with_statement, division
import datetime import datetime
import threading
from math import ceil from math import ceil
import sickgear import sickgear
from . import db, logger, scheduler, search_queue, ui from . import db, logger, scheduler, search_queue, ui
from .helpers import find_show_by_id from .helpers import find_show_by_id
from .providers.generic import GenericProvider from .providers.generic import GenericProvider
from .scheduler import Job
from .search import wanted_episodes from .search import wanted_episodes
from .sgdatetime import SGDatetime from .sgdatetime import SGDatetime
from .tv import TVidProdid, TVEpisode, TVShow from .tv import TVidProdid, TVEpisode, TVShow
@ -74,13 +74,12 @@ class BacklogSearchScheduler(scheduler.Scheduler):
return self.action.nextBacklog - now if self.action.nextBacklog > now else datetime.timedelta(seconds=0) return self.action.nextBacklog - now if self.action.nextBacklog > now else datetime.timedelta(seconds=0)
class BacklogSearcher(object): class BacklogSearcher(Job):
def __init__(self): def __init__(self):
super(BacklogSearcher, self).__init__(self.job_run, kwargs={}, thread_lock=True)
self.last_backlog = self._get_last_backlog() self.last_backlog = self._get_last_backlog()
self.cycle_time = sickgear.BACKLOG_PERIOD self.cycle_time = sickgear.BACKLOG_PERIOD
self.lock = threading.Lock()
self.amActive = False # type: bool
self.amPaused = False # type: bool self.amPaused = False # type: bool
self.amWaiting = False # type: bool self.amWaiting = False # type: bool
self.forcetype = NORMAL_BACKLOG # type: int self.forcetype = NORMAL_BACKLOG # type: int
@ -196,9 +195,6 @@ class BacklogSearcher(object):
:return: nothing :return: nothing
:rtype: None :rtype: None
""" """
if self.amActive and not which_shows:
logger.debug('Backlog is still running, not starting it again')
return
if which_shows: if which_shows:
show_list = which_shows show_list = which_shows
@ -225,7 +221,6 @@ class BacklogSearcher(object):
return return
self._get_last_backlog() self._get_last_backlog()
self.amActive = True
self.amPaused = False self.amPaused = False
cur_date = datetime.date.today().toordinal() cur_date = datetime.date.today().toordinal()
@ -328,7 +323,6 @@ class BacklogSearcher(object):
if standard_backlog and not any_torrent_enabled: if standard_backlog and not any_torrent_enabled:
self._set_last_runtime(now) self._set_last_runtime(now)
self.amActive = False
self._reset_progress_indicator() self._reset_progress_indicator()
@staticmethod @staticmethod
@ -401,7 +395,7 @@ class BacklogSearcher(object):
# noinspection SqlConstantCondition # noinspection SqlConstantCondition
my_db.action('UPDATE info SET last_backlog=%s WHERE 1=1' % when) my_db.action('UPDATE info SET last_backlog=%s WHERE 1=1' % when)
def run(self): def job_run(self):
try: try:
force_type = self.forcetype force_type = self.forcetype
force = self.force force = self.force
@ -409,5 +403,4 @@ class BacklogSearcher(object):
self.force = False self.force = False
self.search_backlog(force_type=force_type, force=force) self.search_backlog(force_type=force_type, force=force)
except (BaseException, Exception): except (BaseException, Exception):
self.amActive = False
raise raise

View file

@ -16,24 +16,21 @@
from __future__ import with_statement from __future__ import with_statement
import threading
import sickgear import sickgear
from .scheduler import Job
class ProperSearcher(object): class ProperSearcher(Job):
def __init__(self): def __init__(self):
self.lock = threading.Lock() super(ProperSearcher, self).__init__(self.job_run, kwargs={}, thread_lock=True)
self.amActive = False
@staticmethod @staticmethod
def is_enabled(): def is_enabled():
# type: (...) -> bool # type: (...) -> bool
return sickgear.DOWNLOAD_PROPERS return sickgear.DOWNLOAD_PROPERS
def run(self): @staticmethod
self.amActive = True def job_run():
propersearch_queue_item = sickgear.search_queue.ProperSearchQueueItem() propersearch_queue_item = sickgear.search_queue.ProperSearchQueueItem()
sickgear.search_queue_scheduler.action.add_item(propersearch_queue_item) sickgear.search_queue_scheduler.action.add_item(propersearch_queue_item)
self.amActive = False

View file

@ -16,19 +16,15 @@
from __future__ import with_statement from __future__ import with_statement
import threading
import sickgear import sickgear
from .scheduler import Job
class RecentSearcher(object): class RecentSearcher(Job):
def __init__(self): def __init__(self):
self.lock = threading.Lock() super(RecentSearcher, self).__init__(self.job_run, kwargs={}, thread_lock=True)
self.amActive = False
def run(self): def job_run(self):
self.amActive = True
recentsearch_queue_item = sickgear.search_queue.RecentSearchQueueItem() recentsearch_queue_item = sickgear.search_queue.RecentSearchQueueItem()
sickgear.search_queue_scheduler.action.add_item(recentsearch_queue_item) sickgear.search_queue_scheduler.action.add_item(recentsearch_queue_item)
self.amActive = False

View file

@ -25,7 +25,7 @@ import sickgear
from . import common, db, logger from . import common, db, logger
from .helpers import sanitize_scene_name from .helpers import sanitize_scene_name
from .name_parser.parser import InvalidNameException, InvalidShowException, NameParser from .name_parser.parser import InvalidNameException, InvalidShowException, NameParser
from .scene_exceptions import get_scene_exceptions from .scene_exceptions import ReleaseMap
from sg_helpers import scantree from sg_helpers import scantree
from _23 import quote_plus from _23 import quote_plus
@ -384,10 +384,10 @@ def all_possible_show_names(show_obj, season=-1, force_anime=False):
:return: a list of all the possible show names :return: a list of all the possible show names
""" """
show_names = get_scene_exceptions(show_obj.tvid, show_obj.prodid, season=season)[:] show_names = ReleaseMap().get_alt_names(show_obj.tvid, show_obj.prodid, season)[:]
if not show_names: # if we don't have any season specific exceptions fallback to generic exceptions if -1 != season and not show_names: # fallback to generic exceptions if no season specific exceptions
season = -1 season = -1
show_names = get_scene_exceptions(show_obj.tvid, show_obj.prodid, season=season)[:] show_names = ReleaseMap().get_alt_names(show_obj.tvid, show_obj.prodid)[:]
if -1 == season: if -1 == season:
show_names.append(show_obj.name) show_names.append(show_obj.name)

View file

@ -70,7 +70,7 @@ class ShowQueue(generic_queue.GenericQueue):
def check_events(self): def check_events(self):
if self.daily_update_running and \ if self.daily_update_running and \
not (self.is_show_update_running() or sickgear.show_update_scheduler.action.amActive): not (self.is_show_update_running() or sickgear.update_show_scheduler.is_running_job):
self.execute_events(DAILY_SHOW_UPDATE_FINISHED_EVENT) self.execute_events(DAILY_SHOW_UPDATE_FINISHED_EVENT)
self.daily_update_running = False self.daily_update_running = False
@ -1139,7 +1139,7 @@ class QueueItemAdd(ShowQueueItem):
self.show_obj.tvid, self.show_obj.prodid) self.show_obj.tvid, self.show_obj.prodid)
# if "scene" numbering is disabled during add show, output availability to log # if "scene" numbering is disabled during add show, output availability to log
if None is not self.scene and not self.show_obj.scene and \ if None is not self.scene and not self.show_obj.scene and \
self.show_obj.prodid in sickgear.scene_exceptions.xem_ids_list[self.show_obj.tvid]: self.show_obj.prodid in sickgear.scene_exceptions.MEMCACHE['release_map_xem'][self.show_obj.tvid]:
logger.log('No scene number mappings found at TheXEM. Therefore, episode scene numbering disabled, ' logger.log('No scene number mappings found at TheXEM. Therefore, episode scene numbering disabled, '
'edit show and enable it to manually add custom numbers for search and media processing.') 'edit show and enable it to manually add custom numbers for search and media processing.')
try: try:
@ -1179,7 +1179,7 @@ class QueueItemAdd(ShowQueueItem):
# if started with WANTED eps then run the backlog # if started with WANTED eps then run the backlog
if WANTED == self.default_status or items_wanted: if WANTED == self.default_status or items_wanted:
logger.log('Launching backlog for this show since episodes are WANTED') logger.log('Launching backlog for this show since episodes are WANTED')
sickgear.backlog_search_scheduler.action.search_backlog([self.show_obj], prevent_same=True) sickgear.search_backlog_scheduler.action.search_backlog([self.show_obj], prevent_same=True)
ui.notifications.message('Show added/search', 'Adding and searching for episodes of' + msg) ui.notifications.message('Show added/search', 'Adding and searching for episodes of' + msg)
else: else:
ui.notifications.message('Show added', 'Adding' + msg) ui.notifications.message('Show added', 'Adding' + msg)
@ -1253,7 +1253,7 @@ class QueueItemRefresh(ShowQueueItem):
self.show_obj.populate_cache(self.force_image_cache) self.show_obj.populate_cache(self.force_image_cache)
# Load XEM data to DB for show # Load XEM data to DB for show
if self.show_obj.prodid in sickgear.scene_exceptions.xem_ids_list[self.show_obj.tvid]: if self.show_obj.prodid in sickgear.scene_exceptions.MEMCACHE['release_map_xem'][self.show_obj.tvid]:
sickgear.scene_numbering.xem_refresh(self.show_obj.tvid, self.show_obj.prodid) sickgear.scene_numbering.xem_refresh(self.show_obj.tvid, self.show_obj.prodid)
if 'pausestatus_after' in self.kwargs and None is not self.kwargs['pausestatus_after']: if 'pausestatus_after' in self.kwargs and None is not self.kwargs['pausestatus_after']:

View file

@ -23,6 +23,7 @@ from exceptions_helper import ex
import sickgear import sickgear
from . import db, logger, network_timezones, properFinder, ui from . import db, logger, network_timezones, properFinder, ui
from .scheduler import Job
# noinspection PyUnreachableCode # noinspection PyUnreachableCode
if False: if False:
@ -54,13 +55,12 @@ def clean_ignore_require_words():
pass pass
class ShowUpdater(object): class ShowUpdater(Job):
def __init__(self): def __init__(self):
self.amActive = False super(ShowUpdater, self).__init__(self.job_run, kwargs={})
def run(self): @staticmethod
def job_run():
self.amActive = True
try: try:
update_datetime = datetime.datetime.now() update_datetime = datetime.datetime.now()
@ -89,14 +89,14 @@ class ShowUpdater(object):
# update xem id lists # update xem id lists
try: try:
sickgear.scene_exceptions.get_xem_ids() sickgear.scene_exceptions.ReleaseMap().fetch_xem_ids()
except (BaseException, Exception): except (BaseException, Exception):
logger.error('xem id list update error') logger.error('xem id list update error')
logger.error(traceback.format_exc()) logger.error(traceback.format_exc())
# update scene exceptions # update scene exceptions
try: try:
sickgear.scene_exceptions.retrieve_exceptions() sickgear.scene_exceptions.ReleaseMap().fetch_exceptions()
except (BaseException, Exception): except (BaseException, Exception):
logger.error('scene exceptions update error') logger.error('scene exceptions update error')
logger.error(traceback.format_exc()) logger.error(traceback.format_exc())
@ -147,7 +147,7 @@ class ShowUpdater(object):
import threading import threading
try: try:
sickgear.background_mapping_task = threading.Thread( sickgear.background_mapping_task = threading.Thread(
name='MAPPINGSUPDATER', target=sickgear.indexermapper.load_mapped_ids, kwargs={'update': True}) name='MAPPINGUPDATES', target=sickgear.indexermapper.load_mapped_ids, kwargs={'update': True})
sickgear.background_mapping_task.start() sickgear.background_mapping_task.start()
except (BaseException, Exception): except (BaseException, Exception):
logger.error('missing mapped ids update error') logger.error('missing mapped ids update error')
@ -224,8 +224,8 @@ class ShowUpdater(object):
logger.log('Added all shows to show queue for full update') logger.log('Added all shows to show queue for full update')
finally: except(BaseException, Exception):
self.amActive = False pass
def __del__(self): def __del__(self):
pass pass

View file

@ -19,6 +19,7 @@ import datetime
from . import db, helpers, logger from . import db, helpers, logger
from .common import * from .common import *
from .scheduler import Job
import sickgear import sickgear
@ -103,24 +104,22 @@ def subtitle_language_filter():
return [language for language in subliminal.language.LANGUAGES if language[2] != ""] return [language for language in subliminal.language.LANGUAGES if language[2] != ""]
class SubtitlesFinder(object): class SubtitlesFinder(Job):
""" """
The SubtitlesFinder will be executed every hour but will not necessarily search The SubtitlesFinder will be executed every hour but will not necessarily search
and download subtitles. Only if the defined rule is true and download subtitles. Only if the defined rule is true
""" """
def __init__(self): def __init__(self):
self.amActive = False super(SubtitlesFinder, self).__init__(self.job_run, kwargs={}, thread_lock=True)
@staticmethod @staticmethod
def is_enabled(): def is_enabled():
return sickgear.USE_SUBTITLES return sickgear.USE_SUBTITLES
def run(self): def job_run(self):
if self.is_enabled(): if self.is_enabled():
self.amActive = True
self._main() self._main()
self.amActive = False
def _main(self): def _main(self):
if 1 > len(sickgear.subtitles.get_enabled_service_list()): if 1 > len(sickgear.subtitles.get_enabled_service_list()):

View file

@ -29,6 +29,7 @@ from exceptions_helper import ex
import sickgear import sickgear
from . import logger, notifiers, ui from . import logger, notifiers, ui
from .scheduler import (Scheduler, Job)
from .piper import check_pip_outdated from .piper import check_pip_outdated
from sg_helpers import cmdline_runner, get_url from sg_helpers import cmdline_runner, get_url
@ -41,12 +42,14 @@ if False:
from typing import Tuple from typing import Tuple
class PackagesUpdater(object): class PackagesUpdater(Job):
def __init__(self): def __init__(self):
super(PackagesUpdater, self).__init__(self.job_run, kwargs={})
self.install_type = 'Python package updates' self.install_type = 'Python package updates'
def run(self, force=False): def job_run(self, force=False):
if not sickgear.EXT_UPDATES \ if not sickgear.EXT_UPDATES \
and self.check_for_new_version(force) \ and self.check_for_new_version(force) \
and sickgear.UPDATE_PACKAGES_AUTO: and sickgear.UPDATE_PACKAGES_AUTO:
@ -64,6 +67,11 @@ class PackagesUpdater(object):
:returns: True when package install/updates are available :returns: True when package install/updates are available
:rtype: bool :rtype: bool
""" """
response = Scheduler.blocking_jobs()
if response:
logger.log(f'Update skipped because {response}', logger.DEBUG)
return False
if force and not sickgear.UPDATE_PACKAGES_MENU: if force and not sickgear.UPDATE_PACKAGES_MENU:
logger.log('Checking not enabled from menu action for %s' % self.install_type) logger.log('Checking not enabled from menu action for %s' % self.install_type)
return False return False
@ -100,12 +108,14 @@ class PackagesUpdater(object):
return True return True
class SoftwareUpdater(object): class SoftwareUpdater(Job):
""" """
Version check class meant to run as a thread object with the sg scheduler. Version check class meant to run as a thread object with the sg scheduler.
""" """
def __init__(self): def __init__(self):
super(SoftwareUpdater, self).__init__(self.job_run, kwargs={})
self._min_python = (100, 0) # set default to absurdly high to prevent update self._min_python = (100, 0) # set default to absurdly high to prevent update
self.install_type = self.find_install_type() self.install_type = self.find_install_type()
@ -150,7 +160,7 @@ class SoftwareUpdater(object):
except (BaseException, Exception): except (BaseException, Exception):
pass pass
def run(self, force=False): def job_run(self, force=False):
# set current branch version # set current branch version
sickgear.BRANCH = self.get_branch() sickgear.BRANCH = self.get_branch()
@ -219,6 +229,11 @@ class SoftwareUpdater(object):
# update branch with current config branch value # update branch with current config branch value
self.updater.branch = sickgear.BRANCH self.updater.branch = sickgear.BRANCH
response = Scheduler.blocking_jobs()
if response:
logger.log(f'Update skipped because {response}', logger.DEBUG)
return False
if not self.is_updatable: if not self.is_updatable:
self._log_cannot_update() self._log_cannot_update()
return False return False

View file

@ -14,17 +14,15 @@
# You should have received a copy of the GNU General Public License # You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>. # along with SickGear. If not, see <http://www.gnu.org/licenses/>.
import threading
import sickgear import sickgear
from .scheduler import Job
from sickgear import watchedstate_queue from sickgear import watchedstate_queue
class WatchedStateUpdater(object): class WatchedStateUpdater(Job):
def __init__(self, name, queue_item): def __init__(self, name, queue_item):
super(WatchedStateUpdater, self).__init__(self.job_run, silent=True, kwargs={}, thread_lock=True)
self.amActive = False
self.lock = threading.Lock()
self.name = name self.name = name
self.queue_item = queue_item self.queue_item = queue_item
@ -32,13 +30,15 @@ class WatchedStateUpdater(object):
def prevent_run(self): def prevent_run(self):
return sickgear.watched_state_queue_scheduler.action.is_in_queue(self.queue_item) return sickgear.watched_state_queue_scheduler.action.is_in_queue(self.queue_item)
def run(self): @staticmethod
def is_enabled():
return True
def job_run(self):
# noinspection PyUnresolvedReferences # noinspection PyUnresolvedReferences
if self.is_enabled(): if self.is_enabled():
self.amActive = True
new_item = self.queue_item() new_item = self.queue_item()
sickgear.watched_state_queue_scheduler.action.add_item(new_item) sickgear.watched_state_queue_scheduler.action.add_item(new_item)
self.amActive = False
class EmbyWatchedStateUpdater(WatchedStateUpdater): class EmbyWatchedStateUpdater(WatchedStateUpdater):

View file

@ -48,6 +48,7 @@ from .indexers import indexer_api, indexer_config
from .indexers.indexer_config import * from .indexers.indexer_config import *
from lib.tvinfo_base.exceptions import * from lib.tvinfo_base.exceptions import *
from .scene_numbering import set_scene_numbering_helper from .scene_numbering import set_scene_numbering_helper
from .scheduler import Scheduler
from .search_backlog import FORCED_BACKLOG from .search_backlog import FORCED_BACKLOG
from .show_updater import clean_ignore_require_words from .show_updater import clean_ignore_require_words
from .sgdatetime import SGDatetime from .sgdatetime import SGDatetime
@ -1915,9 +1916,9 @@ class CMD_SickGearPostProcess(ApiCall):
if not self.type: if not self.type:
self.type = 'manual' self.type = 'manual'
data = processTV.processDir(self.path, process_method=self.process_method, force=self.force_replace, data = processTV.process_dir(self.path, process_method=self.process_method, force=self.force_replace,
force_replace=self.is_priority, failed=self.failed, pp_type=self.type, force_replace=self.is_priority, failed=self.failed, pp_type=self.type,
client=self.client) client=self.client)
if not self.return_data: if not self.return_data:
data = "" data = ""
@ -2074,7 +2075,7 @@ class CMD_SickGearCheckScheduler(ApiCall):
backlogPaused = sickgear.search_queue_scheduler.action.is_backlog_paused() backlogPaused = sickgear.search_queue_scheduler.action.is_backlog_paused()
backlogRunning = sickgear.search_queue_scheduler.action.is_backlog_in_progress() backlogRunning = sickgear.search_queue_scheduler.action.is_backlog_in_progress()
nextBacklog = sickgear.backlog_search_scheduler.next_run().strftime(dateFormat) nextBacklog = sickgear.search_backlog_scheduler.next_run().strftime(dateFormat)
data = {"backlog_is_paused": int(backlogPaused), "backlog_is_running": int(backlogRunning), data = {"backlog_is_paused": int(backlogPaused), "backlog_is_running": int(backlogRunning),
"last_backlog": (0 < len(sql_result) and _ordinal_to_dateForm(sql_result[0]["last_backlog"])) or '', "last_backlog": (0 < len(sql_result) and _ordinal_to_dateForm(sql_result[0]["last_backlog"])) or '',
@ -2177,15 +2178,15 @@ class CMD_SickGearForceSearch(ApiCall):
""" force the specified search type to run """ """ force the specified search type to run """
result = None result = None
if 'recent' == self.searchtype and not sickgear.search_queue_scheduler.action.is_recentsearch_in_progress() \ if 'recent' == self.searchtype and not sickgear.search_queue_scheduler.action.is_recentsearch_in_progress() \
and not sickgear.recent_search_scheduler.action.amActive: and not sickgear.search_recent_scheduler.is_running_job:
result = sickgear.recent_search_scheduler.force_run() result = sickgear.search_recent_scheduler.force_run()
elif 'backlog' == self.searchtype and not sickgear.search_queue_scheduler.action.is_backlog_in_progress() \ elif 'backlog' == self.searchtype and not sickgear.search_queue_scheduler.action.is_backlog_in_progress() \
and not sickgear.backlog_search_scheduler.action.amActive: and not sickgear.search_backlog_scheduler.is_running_job:
sickgear.backlog_search_scheduler.force_search(force_type=FORCED_BACKLOG) sickgear.search_backlog_scheduler.force_search(force_type=FORCED_BACKLOG)
result = True result = True
elif 'proper' == self.searchtype and not sickgear.search_queue_scheduler.action.is_propersearch_in_progress() \ elif 'proper' == self.searchtype and not sickgear.search_queue_scheduler.action.is_propersearch_in_progress() \
and not sickgear.proper_finder_scheduler.action.amActive: and not sickgear.search_propers_scheduler.is_running_job:
result = sickgear.proper_finder_scheduler.force_run() result = sickgear.search_propers_scheduler.force_run()
if result: if result:
return _responds(RESULT_SUCCESS, msg='%s search successfully forced' % self.searchtype) return _responds(RESULT_SUCCESS, msg='%s search successfully forced' % self.searchtype)
return _responds(RESULT_FAILURE, return _responds(RESULT_FAILURE,
@ -2499,6 +2500,13 @@ class CMD_SickGearRestart(ApiCall):
def run(self): def run(self):
""" restart sickgear """ """ restart sickgear """
response = Scheduler.blocking_jobs()
if response:
msg = f'Restart aborted from API because {response.lower()}'
logger.log(msg, logger.DEBUG)
return _responds(RESULT_FAILURE, msg=msg)
sickgear.restart(soft=False) sickgear.restart(soft=False)
return _responds(RESULT_SUCCESS, msg="SickGear is restarting...") return _responds(RESULT_SUCCESS, msg="SickGear is restarting...")
@ -2817,6 +2825,13 @@ class CMD_SickGearShutdown(ApiCall):
def run(self): def run(self):
""" shutdown sickgear """ """ shutdown sickgear """
response = Scheduler.blocking_jobs()
if response:
msg = f'Shutdown aborted from API because {response.lower()}'
logger.log(msg, logger.DEBUG)
return _responds(RESULT_FAILURE, msg=msg)
sickgear.events.put(sickgear.events.SystemEvent.SHUTDOWN) sickgear.events.put(sickgear.events.SystemEvent.SHUTDOWN)
return _responds(RESULT_SUCCESS, msg="SickGear is shutting down...") return _responds(RESULT_SUCCESS, msg="SickGear is shutting down...")
@ -4668,10 +4683,10 @@ class CMD_SickGearShowsForceUpdate(ApiCall):
def run(self): def run(self):
""" force the daily show update now """ """ force the daily show update now """
if sickgear.show_queue_scheduler.action.is_show_update_running() \ if sickgear.show_queue_scheduler.action.is_show_update_running() \
or sickgear.show_update_scheduler.action.amActive: or sickgear.update_show_scheduler.is_running_job:
return _responds(RESULT_FAILURE, msg="show update already running.") return _responds(RESULT_FAILURE, msg="show update already running.")
result = sickgear.show_update_scheduler.force_run() result = sickgear.update_show_scheduler.force_run()
if result: if result:
return _responds(RESULT_SUCCESS, msg="daily show update started") return _responds(RESULT_SUCCESS, msg="daily show update started")
return _responds(RESULT_FAILURE, msg="can't start show update currently") return _responds(RESULT_FAILURE, msg="can't start show update currently")

View file

@ -65,6 +65,7 @@ from .name_parser.parser import InvalidNameException, InvalidShowException, Name
from .providers import newznab, rsstorrent from .providers import newznab, rsstorrent
from .scene_numbering import get_scene_absolute_numbering_for_show, get_scene_numbering_for_show, \ from .scene_numbering import get_scene_absolute_numbering_for_show, get_scene_numbering_for_show, \
get_xem_absolute_numbering_for_show, get_xem_numbering_for_show, set_scene_numbering_helper get_xem_absolute_numbering_for_show, get_xem_numbering_for_show, set_scene_numbering_helper
from .scheduler import Scheduler
from .search_backlog import FORCED_BACKLOG from .search_backlog import FORCED_BACKLOG
from .sgdatetime import SGDatetime from .sgdatetime import SGDatetime
from .show_name_helpers import abbr_showname from .show_name_helpers import abbr_showname
@ -1327,8 +1328,8 @@ class MainHandler(WebHandler):
now = datetime.datetime.now() now = datetime.datetime.now()
events = [ events = [
('recent', sickgear.recent_search_scheduler.time_left), ('recent', sickgear.search_recent_scheduler.time_left),
('backlog', sickgear.backlog_search_scheduler.next_backlog_timeleft), ('backlog', sickgear.search_backlog_scheduler.next_backlog_timeleft),
] ]
if sickgear.DOWNLOAD_PROPERS: if sickgear.DOWNLOAD_PROPERS:
@ -2070,6 +2071,9 @@ class Home(MainHandler):
if str(pid) != str(sickgear.PID): if str(pid) != str(sickgear.PID):
return self.redirect('/home/') return self.redirect('/home/')
if self.maybe_ignore('Shutdown'):
return
t = PageTemplate(web_handler=self, file='restart.tmpl') t = PageTemplate(web_handler=self, file='restart.tmpl')
t.shutdown = True t.shutdown = True
@ -2082,6 +2086,9 @@ class Home(MainHandler):
if str(pid) != str(sickgear.PID): if str(pid) != str(sickgear.PID):
return self.redirect('/home/') return self.redirect('/home/')
if self.maybe_ignore('Restart'):
return
t = PageTemplate(web_handler=self, file='restart.tmpl') t = PageTemplate(web_handler=self, file='restart.tmpl')
t.shutdown = False t.shutdown = False
@ -2089,6 +2096,17 @@ class Home(MainHandler):
return t.respond() return t.respond()
def maybe_ignore(self, task):
response = Scheduler.blocking_jobs()
if response:
task and logger.log('%s aborted because %s' % (task, response.lower()), logger.DEBUG)
self.redirect(self.request.headers['Referer'])
if task:
ui.notifications.message(u'Fail %s because %s, please try later' % (task.lower(), response.lower()))
return True
return False
def update(self, pid=None): def update(self, pid=None):
if str(pid) != str(sickgear.PID): if str(pid) != str(sickgear.PID):
@ -2326,15 +2344,15 @@ class Home(MainHandler):
t.season_min = ([], [1])[2 < t.latest_season] + [t.latest_season] t.season_min = ([], [1])[2 < t.latest_season] + [t.latest_season]
t.other_seasons = (list(set(all_seasons) - set(t.season_min)), [])[display_show_minimum] t.other_seasons = (list(set(all_seasons) - set(t.season_min)), [])[display_show_minimum]
t.seasons = [] t.seasons = []
for x in all_seasons: for cur_season in all_seasons:
t.seasons += [(x, [None] if x not in (t.season_min + t.other_seasons) else my_db.select( t.seasons += [(cur_season, [None] if cur_season not in (t.season_min + t.other_seasons) else my_db.select(
'SELECT *' 'SELECT *'
' FROM tv_episodes' ' FROM tv_episodes'
' WHERE indexer = ? AND showid = ?' ' WHERE indexer = ? AND showid = ?'
' AND season = ?' ' AND season = ?'
' ORDER BY episode DESC', ' ORDER BY episode DESC',
[show_obj.tvid, show_obj.prodid, x] [show_obj.tvid, show_obj.prodid, cur_season]
), scene_exceptions.has_season_exceptions(show_obj.tvid, show_obj.prodid, x))] ), scene_exceptions.ReleaseMap().has_season_exceptions(show_obj.tvid, show_obj.prodid, cur_season))]
for row in my_db.select('SELECT season, episode, status' for row in my_db.select('SELECT season, episode, status'
' FROM tv_episodes' ' FROM tv_episodes'
@ -2423,7 +2441,7 @@ class Home(MainHandler):
t.clean_show_name = quote_plus(sickgear.indexermapper.clean_show_name(show_obj.name)) t.clean_show_name = quote_plus(sickgear.indexermapper.clean_show_name(show_obj.name))
t.min_initial = Quality.get_quality_ui(min(Quality.split_quality(show_obj.quality)[0])) t.min_initial = Quality.get_quality_ui(min(Quality.split_quality(show_obj.quality)[0]))
t.show_obj.exceptions = scene_exceptions.get_scene_exceptions(show_obj.tvid, show_obj.prodid) t.show_obj.exceptions = scene_exceptions.ReleaseMap().get_alt_names(show_obj.tvid, show_obj.prodid)
# noinspection PyUnresolvedReferences # noinspection PyUnresolvedReferences
t.all_scene_exceptions = show_obj.exceptions # normally Unresolved as not a class attribute, force set above t.all_scene_exceptions = show_obj.exceptions # normally Unresolved as not a class attribute, force set above
t.scene_numbering = get_scene_numbering_for_show(show_obj.tvid, show_obj.prodid) t.scene_numbering = get_scene_numbering_for_show(show_obj.tvid, show_obj.prodid)
@ -2562,7 +2580,7 @@ class Home(MainHandler):
@staticmethod @staticmethod
def scene_exceptions(tvid_prodid, wanted_season=None): def scene_exceptions(tvid_prodid, wanted_season=None):
exceptions_list = sickgear.scene_exceptions.get_all_scene_exceptions(tvid_prodid) exceptions_list = scene_exceptions.ReleaseMap().get_show_exceptions(tvid_prodid)
wanted_season = helpers.try_int(wanted_season, None) wanted_season = helpers.try_int(wanted_season, None)
wanted_not_found = None is not wanted_season and wanted_season not in exceptions_list wanted_not_found = None is not wanted_season and wanted_season not in exceptions_list
if not exceptions_list or wanted_not_found: if not exceptions_list or wanted_not_found:
@ -2754,7 +2772,7 @@ class Home(MainHandler):
return [err_string] return [err_string]
return self._generic_message('Error', err_string) return self._generic_message('Error', err_string)
show_obj.exceptions = scene_exceptions.get_all_scene_exceptions(tvid_prodid) show_obj.exceptions = scene_exceptions.ReleaseMap().get_show_exceptions(tvid_prodid)
if None is not quality_preset and int(quality_preset): if None is not quality_preset and int(quality_preset):
best_qualities = [] best_qualities = []
@ -2971,7 +2989,7 @@ class Home(MainHandler):
if do_update_exceptions: if do_update_exceptions:
try: try:
scene_exceptions.update_scene_exceptions(show_obj.tvid, show_obj.prodid, exceptions_list) scene_exceptions.ReleaseMap().update_exceptions(show_obj, exceptions_list)
helpers.cpu_sleep() helpers.cpu_sleep()
except exceptions_helper.CantUpdateException: except exceptions_helper.CantUpdateException:
errors.append('Unable to force an update on scene exceptions of the show.') errors.append('Unable to force an update on scene exceptions of the show.')
@ -3912,16 +3930,16 @@ class HomeProcessMedia(Home):
m = sickgear.NZBGET_MAP.split('=') m = sickgear.NZBGET_MAP.split('=')
dir_name, not_used = helpers.path_mapper(m[0], m[1], dir_name) dir_name, not_used = helpers.path_mapper(m[0], m[1], dir_name)
result = processTV.processDir(dir_name if dir_name else None, result = processTV.process_dir(dir_name if dir_name else None,
None if not nzb_name else decode_str(nzb_name), None if not nzb_name else decode_str(nzb_name),
process_method=process_method, pp_type=process_type, process_method=process_method, pp_type=process_type,
cleanup=cleanup, cleanup=cleanup,
force=force in ('on', '1'), force=force in ('on', '1'),
force_replace=force_replace in ('on', '1'), force_replace=force_replace in ('on', '1'),
failed='0' != failed, failed='0' != failed,
webhandler=None if '0' == stream else self.send_message, webhandler=None if '0' == stream else self.send_message,
show_obj=show_obj, is_basedir=is_basedir in ('on', '1'), show_obj=show_obj, is_basedir=is_basedir in ('on', '1'),
skip_failure_processing=skip_failure_processing, client=client) skip_failure_processing=skip_failure_processing, client=client)
if '0' == stream: if '0' == stream:
regexp = re.compile(r'(?i)<br[\s/]+>', flags=re.UNICODE) regexp = re.compile(r'(?i)<br[\s/]+>', flags=re.UNICODE)
@ -4448,7 +4466,7 @@ class AddShows(Home):
t.blocklist = [] t.blocklist = []
t.groups = [] t.groups = []
t.show_scene_maps = list(itervalues(sickgear.scene_exceptions.xem_ids_list)) t.show_scene_maps = list(itervalues(scene_exceptions.MEMCACHE['release_map_xem']))
has_shows = len(sickgear.showList) has_shows = len(sickgear.showList)
t.try_id = [] # [dict try_tip: try_term] t.try_id = [] # [dict try_tip: try_term]
@ -6616,7 +6634,7 @@ class Manage(MainHandler):
show_obj = helpers.find_show_by_id(tvid_prodid) show_obj = helpers.find_show_by_id(tvid_prodid)
if show_obj: if show_obj:
sickgear.backlog_search_scheduler.action.search_backlog([show_obj]) sickgear.search_backlog_scheduler.action.search_backlog([show_obj])
self.redirect('/manage/backlog-overview/') self.redirect('/manage/backlog-overview/')
@ -7116,11 +7134,11 @@ class ManageSearch(Manage):
def index(self): def index(self):
t = PageTemplate(web_handler=self, file='manage_manageSearches.tmpl') t = PageTemplate(web_handler=self, file='manage_manageSearches.tmpl')
# t.backlog_pi = sickgear.backlog_search_scheduler.action.get_progress_indicator() # t.backlog_pi = sickgear.search_backlog_scheduler.action.get_progress_indicator()
t.backlog_paused = sickgear.search_queue_scheduler.action.is_backlog_paused() t.backlog_paused = sickgear.search_queue_scheduler.action.is_backlog_paused()
t.scheduled_backlog_active_providers = sickgear.search_backlog.BacklogSearcher.providers_active(scheduled=True) t.scheduled_backlog_active_providers = sickgear.search_backlog.BacklogSearcher.providers_active(scheduled=True)
t.backlog_running = sickgear.search_queue_scheduler.action.is_backlog_in_progress() t.backlog_running = sickgear.search_queue_scheduler.action.is_backlog_in_progress()
t.backlog_is_active = sickgear.backlog_search_scheduler.action.am_running() t.backlog_is_active = sickgear.search_backlog_scheduler.action.am_running()
t.standard_backlog_running = sickgear.search_queue_scheduler.action.is_standard_backlog_in_progress() t.standard_backlog_running = sickgear.search_queue_scheduler.action.is_standard_backlog_in_progress()
t.backlog_running_type = sickgear.search_queue_scheduler.action.type_of_backlog_in_progress() t.backlog_running_type = sickgear.search_queue_scheduler.action.type_of_backlog_in_progress()
t.recent_search_status = sickgear.search_queue_scheduler.action.is_recentsearch_in_progress() t.recent_search_status = sickgear.search_queue_scheduler.action.is_recentsearch_in_progress()
@ -7161,7 +7179,7 @@ class ManageSearch(Manage):
def force_backlog(self): def force_backlog(self):
# force it to run the next time it looks # force it to run the next time it looks
if not sickgear.search_queue_scheduler.action.is_standard_backlog_in_progress(): if not sickgear.search_queue_scheduler.action.is_standard_backlog_in_progress():
sickgear.backlog_search_scheduler.force_search(force_type=FORCED_BACKLOG) sickgear.search_backlog_scheduler.force_search(force_type=FORCED_BACKLOG)
logger.log('Backlog search forced') logger.log('Backlog search forced')
ui.notifications.message('Backlog search started') ui.notifications.message('Backlog search started')
@ -7172,7 +7190,7 @@ class ManageSearch(Manage):
# force it to run the next time it looks # force it to run the next time it looks
if not sickgear.search_queue_scheduler.action.is_recentsearch_in_progress(): if not sickgear.search_queue_scheduler.action.is_recentsearch_in_progress():
result = sickgear.recent_search_scheduler.force_run() result = sickgear.search_recent_scheduler.force_run()
if result: if result:
logger.log('Recent search forced') logger.log('Recent search forced')
ui.notifications.message('Recent search started') ui.notifications.message('Recent search started')
@ -7183,7 +7201,7 @@ class ManageSearch(Manage):
def force_find_propers(self): def force_find_propers(self):
# force it to run the next time it looks # force it to run the next time it looks
result = sickgear.proper_finder_scheduler.force_run() result = sickgear.search_propers_scheduler.force_run()
if result: if result:
logger.log('Find propers search forced') logger.log('Find propers search forced')
ui.notifications.message('Find propers search started') ui.notifications.message('Find propers search started')
@ -7207,10 +7225,10 @@ class ShowTasks(Manage):
t = PageTemplate(web_handler=self, file='manage_showProcesses.tmpl') t = PageTemplate(web_handler=self, file='manage_showProcesses.tmpl')
t.queue_length = sickgear.show_queue_scheduler.action.queue_length() t.queue_length = sickgear.show_queue_scheduler.action.queue_length()
t.people_queue = sickgear.people_queue_scheduler.action.queue_data() t.people_queue = sickgear.people_queue_scheduler.action.queue_data()
t.next_run = sickgear.show_update_scheduler.last_run.replace( t.next_run = sickgear.update_show_scheduler.last_run.replace(
hour=sickgear.show_update_scheduler.start_time.hour) hour=sickgear.update_show_scheduler.start_time.hour)
t.show_update_running = sickgear.show_queue_scheduler.action.is_show_update_running() \ t.show_update_running = sickgear.show_queue_scheduler.action.is_show_update_running() \
or sickgear.show_update_scheduler.action.amActive or sickgear.update_show_scheduler.is_running_job
my_db = db.DBConnection(row_type='dict') my_db = db.DBConnection(row_type='dict')
sql_result = my_db.select('SELECT n.indexer || ? || n.indexer_id AS tvid_prodid,' sql_result = my_db.select('SELECT n.indexer || ? || n.indexer_id AS tvid_prodid,'
@ -7293,7 +7311,7 @@ class ShowTasks(Manage):
def force_show_update(self): def force_show_update(self):
result = sickgear.show_update_scheduler.force_run() result = sickgear.update_show_scheduler.force_run()
if result: if result:
logger.log('Show Update forced') logger.log('Show Update forced')
ui.notifications.message('Forced Show Update started') ui.notifications.message('Forced Show Update started')
@ -7982,7 +8000,7 @@ class ConfigGeneral(Config):
def update_alt(): def update_alt():
""" Load scene exceptions """ """ Load scene exceptions """
changed_exceptions, cnt_updated_numbers, min_remain_iv = scene_exceptions.retrieve_exceptions() changed_exceptions, cnt_updated_numbers, min_remain_iv = scene_exceptions.ReleaseMap().fetch_exceptions()
return json_dumps(dict(names=int(changed_exceptions), numbers=cnt_updated_numbers, min_remain_iv=min_remain_iv)) return json_dumps(dict(names=int(changed_exceptions), numbers=cnt_updated_numbers, min_remain_iv=min_remain_iv))
@ -7991,7 +8009,7 @@ class ConfigGeneral(Config):
""" Return alternative release names and numbering as json text""" """ Return alternative release names and numbering as json text"""
# alternative release names and numbers # alternative release names and numbers
alt_names = scene_exceptions.get_all_scene_exceptions(tvid_prodid) alt_names = scene_exceptions.ReleaseMap().get_show_exceptions(tvid_prodid)
alt_numbers = get_scene_numbering_for_show(*TVidProdid(tvid_prodid).tuple) # arbitrary order alt_numbers = get_scene_numbering_for_show(*TVidProdid(tvid_prodid).tuple) # arbitrary order
ui_output = 'No alternative names or numbers to export' ui_output = 'No alternative names or numbers to export'
@ -8180,8 +8198,8 @@ class ConfigGeneral(Config):
sickgear.UPDATE_SHOWS_ON_START = config.checkbox_to_value(update_shows_on_start) sickgear.UPDATE_SHOWS_ON_START = config.checkbox_to_value(update_shows_on_start)
sickgear.SHOW_UPDATE_HOUR = config.minimax(show_update_hour, 3, 0, 23) sickgear.SHOW_UPDATE_HOUR = config.minimax(show_update_hour, 3, 0, 23)
try: try:
with sickgear.show_update_scheduler.lock: with sickgear.update_show_scheduler.lock:
sickgear.show_update_scheduler.start_time = datetime.time(hour=sickgear.SHOW_UPDATE_HOUR) sickgear.update_show_scheduler.start_time = datetime.time(hour=sickgear.SHOW_UPDATE_HOUR)
except (BaseException, Exception) as e: except (BaseException, Exception) as e:
logger.error('Could not change Show Update Scheduler time: %s' % ex(e)) logger.error('Could not change Show Update Scheduler time: %s' % ex(e))
sickgear.TRASH_REMOVE_SHOW = config.checkbox_to_value(trash_remove_show) sickgear.TRASH_REMOVE_SHOW = config.checkbox_to_value(trash_remove_show)

View file

@ -74,14 +74,15 @@ class SceneExceptionTestCase(test.SickbeardTestDBCase):
sickgear.showList.append(s) sickgear.showList.append(s)
sickgear.showDict[s.sid_int] = s sickgear.showDict[s.sid_int] = s
sickgear.webserve.Home.make_showlist_unique_names() sickgear.webserve.Home.make_showlist_unique_names()
scene_exceptions.retrieve_exceptions() scene_exceptions.ReleaseMap().fetch_exceptions()
name_cache.build_name_cache() name_cache.build_name_cache()
def test_sceneExceptionsEmpty(self): def test_sceneExceptionsEmpty(self):
self.assertEqual(scene_exceptions.get_scene_exceptions(0, 0), []) self.assertEqual(scene_exceptions.ReleaseMap().get_alt_names(0, 0), [])
def test_sceneExceptionsBlack_Lagoon(self): def test_sceneExceptionsBlack_Lagoon(self):
self.assertEqual(sorted(scene_exceptions.get_scene_exceptions(1, 79604)), ['Black-Lagoon']) self.assertEqual(sorted(
scene_exceptions.ReleaseMap().get_alt_names(1, 79604)), ['Black-Lagoon'])
def test_sceneExceptionByName(self): def test_sceneExceptionByName(self):
self.assertEqual(scene_exceptions.get_scene_exception_by_name( self.assertEqual(scene_exceptions.get_scene_exception_by_name(
@ -98,14 +99,18 @@ class SceneExceptionTestCase(test.SickbeardTestDBCase):
s.anime = 1 s.anime = 1
sickgear.showList.append(s) sickgear.showList.append(s)
sickgear.showDict[s.sid_int] = s sickgear.showDict[s.sid_int] = s
scene_exceptions.retrieve_exceptions() scene_exceptions.ReleaseMap().fetch_exceptions()
name_cache.build_name_cache() name_cache.build_name_cache()
self.assertEqual(scene_exceptions.get_scene_exception_by_name('ブラック・ラグーン'), [1, 79604, -1]) self.assertEqual(scene_exceptions.get_scene_exception_by_name(
self.assertEqual(scene_exceptions.get_scene_exception_by_name('Burakku Ragūn'), [1, 79604, -1]) 'ブラック・ラグーン'), [1, 79604, -1])
self.assertEqual(scene_exceptions.get_scene_exception_by_name('Rokka no Yuusha'), [1, 295243, -1]) self.assertEqual(scene_exceptions.get_scene_exception_by_name(
'Burakku Ragūn'), [1, 79604, -1])
self.assertEqual(scene_exceptions.get_scene_exception_by_name(
'Rokka no Yuusha'), [1, 295243, -1])
def test_sceneExceptionByNameEmpty(self): def test_sceneExceptionByNameEmpty(self):
self.assertEqual(scene_exceptions.get_scene_exception_by_name('nothing useful'), [None, None, None]) self.assertEqual(scene_exceptions.get_scene_exception_by_name(
'nothing useful'), [None, None, None])
def test_sceneExceptionsResetNameCache(self): def test_sceneExceptionsResetNameCache(self):
# clear the exceptions # clear the exceptions
@ -117,7 +122,7 @@ class SceneExceptionTestCase(test.SickbeardTestDBCase):
name_cache.add_name_to_cache('Cached Name', prodid=0) name_cache.add_name_to_cache('Cached Name', prodid=0)
# updating should not clear the cache this time since our exceptions didn't change # updating should not clear the cache this time since our exceptions didn't change
scene_exceptions.retrieve_exceptions() scene_exceptions.ReleaseMap().fetch_exceptions()
self.assertEqual(name_cache.retrieve_name_from_cache('Cached Name'), (0, 0)) self.assertEqual(name_cache.retrieve_name_from_cache('Cached Name'), (0, 0))

View file

@ -180,7 +180,7 @@ class WebAPICase(test.SickbeardTestDBCase):
search_queue.SearchQueue(), search_queue.SearchQueue(),
cycle_time=datetime.timedelta(seconds=3), cycle_time=datetime.timedelta(seconds=3),
thread_name='SEARCHQUEUE') thread_name='SEARCHQUEUE')
sickgear.backlog_search_scheduler = search_backlog.BacklogSearchScheduler( sickgear.search_backlog_scheduler = search_backlog.BacklogSearchScheduler(
search_backlog.BacklogSearcher(), search_backlog.BacklogSearcher(),
cycle_time=datetime.timedelta(minutes=60), cycle_time=datetime.timedelta(minutes=60),
run_delay=datetime.timedelta(minutes=60), run_delay=datetime.timedelta(minutes=60),