Mirror of https://github.com/SickGear/SickGear.git (synced 2024-12-01 00:43:37 +00:00)
Fixed app performance issues caused by recent upgrades.
Fixed manual searches not working. Moved the misc routine tasks (updating network timezones, refreshing scene exceptions, and trimming failed history) into a proper scheduled thread that runs every hour. These routine tasks are now forced to complete at startup before anything else runs; this is critical because they perform the scene exception updates and load the internal scene cache that searches depend on.
This commit is contained in:
parent de01fa1e37
commit c5f933e4c8
9 changed files with 132 additions and 105 deletions
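Before the per-file changes, here is a condensed sketch of the pattern this commit introduces: a Maintenance task guarded by an amActive flag, registered on an hourly scheduler with runImmediately=True, and a startup wait that polls the flag so the first pass (scene exception refresh, network timezone update, failed-history trimming) completes before anything else starts. The Scheduler class below is a simplified stand-in for sickbeard.scheduler.Scheduler and the run() body is a placeholder; only the names Maintenance, amActive, cycleTime, threadName and runImmediately are taken from the diff that follows.

import datetime
import threading
import time


class Maintenance(object):
    # stand-in for the new sickbeard/maintenance.py task: one pass of the
    # routine jobs, with an amActive flag that callers can poll
    def __init__(self):
        self.amActive = False

    def run(self, force=False):
        self.amActive = True
        try:
            time.sleep(0.2)  # placeholder for the scene exception / timezone / failed history work
        finally:
            self.amActive = False


class Scheduler(object):
    # simplified stand-in for sickbeard.scheduler.Scheduler: optionally run the
    # action as soon as the thread starts, then repeat every cycleTime
    def __init__(self, action, cycleTime, threadName, runImmediately=False):
        self.action = action
        self.cycleTime = cycleTime
        self.runImmediately = runImmediately
        self.abort = False
        self.thread = threading.Thread(target=self._loop, name=threadName)
        self.thread.daemon = True  # so this demo exits without waiting a full cycle

    def _loop(self):
        if self.runImmediately:
            self.action.run()
        while not self.abort:
            time.sleep(self.cycleTime.total_seconds())
            if not self.abort:
                self.action.run()


if __name__ == '__main__':
    task = Maintenance()
    task.amActive = True  # pre-set so the poll below cannot finish before the first pass starts
    maintenanceScheduler = Scheduler(task,
                                     cycleTime=datetime.timedelta(hours=1),
                                     threadName="MAINTENANCE",
                                     runImmediately=True)
    maintenanceScheduler.thread.start()
    # the same busy-wait start() uses: block until the initial maintenance pass completes
    while maintenanceScheduler.action.amActive:
        time.sleep(1)
    print("initial maintenance done; searches may begin")

In the diff itself, the real wiring is: initialize() schedules maintenance.Maintenance() hourly, start() blocks on maintenanceScheduler.action.amActive before starting the other schedulers, and halt() aborts and joins the MAINTENANCE thread.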
@@ -20,7 +20,7 @@ from __future__ import with_statement
 import cherrypy
 import webbrowser
-import sqlite3
+import time
 import datetime
 import socket
 import os, sys, subprocess, re
@@ -41,6 +41,7 @@ from sickbeard import helpers, db, exceptions, show_queue, search_queue, schedul
 from sickbeard import logger
 from sickbeard import naming
 from sickbeard import dailysearcher
+from sickbeard import maintenance
 from sickbeard import scene_numbering, scene_exceptions, name_cache
 from indexers.indexer_api import indexerApi
 from indexers.indexer_exceptions import indexer_shownotfound, indexer_exception, indexer_error, indexer_episodenotfound, \
@@ -77,6 +78,7 @@ PIDFILE = ''

 DAEMON = None

+maintenanceScheduler = None
 dailySearchScheduler = None
 backlogSearchScheduler = None
 showUpdateScheduler = None
@@ -472,7 +474,7 @@ def initialize(consoleLogging=True):
         USE_FAILED_DOWNLOADS, DELETE_FAILED, ANON_REDIRECT, LOCALHOST_IP, TMDB_API_KEY, DEBUG, PROXY_SETTING, \
         AUTOPOSTPROCESSER_FREQUENCY, DEFAULT_AUTOPOSTPROCESSER_FREQUENCY, MIN_AUTOPOSTPROCESSER_FREQUENCY, \
         ANIME_DEFAULT, NAMING_ANIME, ANIMESUPPORT, USE_ANIDB, ANIDB_USERNAME, ANIDB_PASSWORD, ANIDB_USE_MYLIST, \
-        ANIME_SPLIT_HOME
+        ANIME_SPLIT_HOME, maintenanceScheduler

     if __INITIALIZED__:
         return False
@@ -904,7 +906,7 @@ def initialize(consoleLogging=True):
     # initialize the cache database
     db.upgradeDatabase(db.DBConnection("cache.db"), cache_db.InitialSchema)

-    # initalize the failed downloads database
+    # initialize the failed downloads database
     db.upgradeDatabase(db.DBConnection("failed.db"), failed_db.InitialSchema)

     # fix up any db problems
@@ -933,10 +935,20 @@ def initialize(consoleLogging=True):
     newznabProviderList = providers.getNewznabProviderList(NEWZNAB_DATA)
     providerList = providers.makeProviderList()

-    # the interval for this is stored inside the ShowUpdater class
-    showUpdaterInstance = showUpdater.ShowUpdater()
-    showUpdateScheduler = scheduler.Scheduler(showUpdaterInstance,
-        cycleTime=showUpdaterInstance.updateInterval,
+    maintenanceScheduler = scheduler.Scheduler(maintenance.Maintenance(),
+        cycleTime=datetime.timedelta(hours=1),
+        threadName="MAINTENANCE",
+        silent=True,
+        runImmediately=True)
+
+    dailySearchScheduler = scheduler.Scheduler(dailysearcher.DailySearcher(),
+        cycleTime=datetime.timedelta(minutes=DAILYSEARCH_FREQUENCY),
+        threadName="DAILYSEARCHER",
+        silent=True,
+        runImmediately=DAILYSEARCH_STARTUP)
+
+    showUpdateScheduler = scheduler.Scheduler(showUpdater.ShowUpdater(),
+        cycleTime=showUpdater.ShowUpdater().updateInterval,
         threadName="SHOWUPDATER",
         runImmediately=False)

@@ -956,29 +968,30 @@ def initialize(consoleLogging=True):
         threadName="SEARCHQUEUE",
         silent=True)

-    properFinderInstance = properFinder.ProperFinder()
-    properFinderScheduler = scheduler.Scheduler(properFinderInstance,
-        cycleTime=properFinderInstance.updateInterval,
+    properFinderScheduler = scheduler.Scheduler(properFinder.ProperFinder(),
+        cycleTime=properFinder.ProperFinder().updateInterval,
         threadName="FINDPROPERS",
+        silent=False if DOWNLOAD_PROPERS else True,
         runImmediately=True)
-    if not DOWNLOAD_PROPERS:
-        properFinderScheduler.silent = True

     autoPostProcesserScheduler = scheduler.Scheduler(autoPostProcesser.PostProcesser(),
         cycleTime=datetime.timedelta(
             minutes=AUTOPOSTPROCESSER_FREQUENCY),
         threadName="POSTPROCESSER",
+        silent=False if PROCESS_AUTOMATICALLY else True,
         runImmediately=True)
-    if not PROCESS_AUTOMATICALLY:
-        autoPostProcesserScheduler.silent = True

     traktWatchListCheckerSchedular = scheduler.Scheduler(traktWatchListChecker.TraktChecker(),
         cycleTime=datetime.timedelta(hours=1),
         threadName="TRAKTWATCHLIST",
+        silent=False if USE_TRAKT else True,
         runImmediately=True)

-    if not USE_TRAKT:
-        traktWatchListCheckerSchedular.silent = True
+    subtitlesFinderScheduler = scheduler.Scheduler(subtitles.SubtitlesFinder(),
+        cycleTime=datetime.timedelta(hours=SUBTITLES_FINDER_FREQUENCY),
+        threadName="FINDSUBTITLES",
+        silent=False if USE_SUBTITLES else True,
+        runImmediately=True)

     backlogSearchScheduler = searchBacklog.BacklogSearchScheduler(searchBacklog.BacklogSearcher(),
         cycleTime=datetime.timedelta(
@@ -987,23 +1000,6 @@ def initialize(consoleLogging=True):
         silent=True,
         runImmediately=BACKLOG_STARTUP)

-    dailySearchScheduler = scheduler.Scheduler(dailysearcher.DailySearcher(),
-        cycleTime=datetime.timedelta(minutes=DAILYSEARCH_FREQUENCY),
-        threadName="DAILYSEARCHER",
-        silent=True,
-        runImmediately=DAILYSEARCH_STARTUP)
-
-    subtitlesFinderScheduler = scheduler.Scheduler(subtitles.SubtitlesFinder(),
-        cycleTime=datetime.timedelta(hours=SUBTITLES_FINDER_FREQUENCY),
-        threadName="FINDSUBTITLES",
-        runImmediately=True)
-
-    if not USE_SUBTITLES:
-        subtitlesFinderScheduler.silent = True
-
-    showList = []
-    loadingShowList = {}
-
     # dynamically load provider settings
     for curTorrentProvider in [curProvider for curProvider in providers.sortedProviderList() if
                                curProvider.providerType == GenericProvider.TORRENT]:
@@ -1041,7 +1037,7 @@ def initialize(consoleLogging=True):
             curTorrentProvider.getID() + '_options', '')
         if hasattr(curTorrentProvider, 'ratio'):
             curTorrentProvider.ratio = check_setting_str(CFG, curTorrentProvider.getID().upper(),
                 curTorrentProvider.getID() + '_ratio', '')
         if hasattr(curTorrentProvider, 'minseed'):
             curTorrentProvider.minseed = check_setting_int(CFG, curTorrentProvider.getID().upper(),
                 curTorrentProvider.getID() + '_minseed', 0)
@@ -1099,23 +1095,29 @@ def initialize(consoleLogging=True):
         except:
             pass

+    showList = []
+    loadingShowList = {}
+
     __INITIALIZED__ = True
     return True


 def start():
-    global __INITIALIZED__, backlogSearchScheduler, \
+    global __INITIALIZED__, maintenanceScheduler, backlogSearchScheduler, \
         showUpdateScheduler, versionCheckScheduler, showQueueScheduler, \
         properFinderScheduler, autoPostProcesserScheduler, searchQueueScheduler, \
-        subtitlesFinderScheduler, started, USE_SUBTITLES, \
-        traktWatchListCheckerSchedular, dailySearchScheduler, started
+        subtitlesFinderScheduler, USE_SUBTITLES,traktWatchListCheckerSchedular, \
+        dailySearchScheduler, started

     with INIT_LOCK:

         if __INITIALIZED__:

-            # start the version checker
-            versionCheckScheduler.thread.start()
+            # start the maintenance scheduler
+            maintenanceScheduler.thread.start()
+            logger.log(u"Performing initial maintenance tasks, please wait ...")
+            while maintenanceScheduler.action.amActive:
+                time.sleep(1)

             # start the daily search scheduler
             dailySearchScheduler.thread.start()
@@ -1123,18 +1125,21 @@ def start():
             # start the backlog scheduler
             backlogSearchScheduler.thread.start()

+            # start the show updater
+            showUpdateScheduler.thread.start()
+
+            # start the version checker
+            versionCheckScheduler.thread.start()
+
+            # start the queue checker
+            showQueueScheduler.thread.start()
+
             # start the search queue checker
             searchQueueScheduler.thread.start()

             # start the queue checker
             properFinderScheduler.thread.start()

-            # start the queue checker
-            showQueueScheduler.thread.start()
-
-            # start the show updater
-            showUpdateScheduler.thread.start()
-
             # start the proper finder
             autoPostProcesserScheduler.thread.start()
@@ -1149,10 +1154,11 @@ def start():


 def halt():
-    global __INITIALIZED__, backlogSearchScheduler, showUpdateScheduler, \
-        showQueueScheduler, properFinderScheduler, autoPostProcesserScheduler, searchQueueScheduler, \
-        subtitlesFinderScheduler, dailySearchScheduler, started, \
-        traktWatchListCheckerSchedular
+    global __INITIALIZED__, maintenanceScheduler, backlogSearchScheduler, \
+        showUpdateScheduler, versionCheckScheduler, showQueueScheduler, \
+        properFinderScheduler, autoPostProcesserScheduler, searchQueueScheduler, \
+        subtitlesFinderScheduler, traktWatchListCheckerSchedular, \
+        dailySearchScheduler, started

     with INIT_LOCK:

@@ -1162,10 +1168,10 @@ def halt():

         # abort all the threads

-        backlogSearchScheduler.abort = True
-        logger.log(u"Waiting for the BACKLOG thread to exit")
+        maintenanceScheduler.abort = True
+        logger.log(u"Waiting for the MAINTENANCE scheduler thread to exit")
         try:
-            backlogSearchScheduler.thread.join(10)
+            maintenanceScheduler.thread.join(10)
         except:
             pass

@@ -1176,6 +1182,13 @@ def halt():
         except:
             pass

+        backlogSearchScheduler.abort = True
+        logger.log(u"Waiting for the BACKLOG thread to exit")
+        try:
+            backlogSearchScheduler.thread.join(10)
+        except:
+            pass
+
         showUpdateScheduler.abort = True
         logger.log(u"Waiting for the SHOWUPDATER thread to exit")
         try:
@@ -1234,7 +1247,7 @@ def halt():

         if ADBA_CONNECTION:
             ADBA_CONNECTION.logout()
-            #ADBA_CONNECTION.stop()
+            # ADBA_CONNECTION.stop()
             logger.log(u"Waiting for the ANIDB CONNECTION thread to exit")
             try:
                 ADBA_CONNECTION.join(5)
@@ -1336,7 +1349,7 @@ def restart(soft=True):
     if soft:
         halt()
         saveAll()
-        #logger.log(u"Restarting cherrypy")
+        # logger.log(u"Restarting cherrypy")
         #cherrypy.engine.restart()
         logger.log(u"Re-initializing all data")
         initialize()
@@ -17,10 +17,9 @@
 # along with SickRage. If not, see <http://www.gnu.org/licenses/>.

 from __future__ import with_statement
-import time
 import datetime
 import threading
-import traceback

 import sickbeard
 from sickbeard import logger
@@ -28,9 +27,6 @@ from sickbeard import db
 from sickbeard import common
 from sickbeard import helpers
 from sickbeard import exceptions
-from sickbeard.exceptions import ex
-from sickbeard.search import pickBestResult, snatchEpisode
-from sickbeard import generic_queue

 class DailySearcher():
     def __init__(self):
@@ -90,6 +86,6 @@ class DailySearcher():
                 sickbeard.name_cache.clearCache(show)

                 dailysearch_queue_item = sickbeard.search_queue.DailySearchQueueItem(show, segment)
-                sickbeard.searchQueueScheduler.action.add_item(dailysearch_queue_item)  #@UndefinedVariable
+                sickbeard.searchQueueScheduler.action.add_item(dailysearch_queue_item)
             else:
                 logger.log(u"Could not find any needed episodes to search for ...")

sickbeard/maintenance.py (new file, 47 lines)

@@ -0,0 +1,47 @@
+# Author: Nic Wolfe <nic@wolfeden.ca>
+# URL: http://code.google.com/p/sickbeard/
+#
+# This file is part of SickRage.
+#
+# SickRage is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# SickRage is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
+
+from __future__ import with_statement
+import threading
+import sickbeard
+
+from sickbeard import scene_exceptions
+from sickbeard import failed_history
+from sickbeard import network_timezones
+
+
+class Maintenance():
+    def __init__(self):
+        self.lock = threading.Lock()
+
+        self.amActive = False
+
+    def run(self, force=False):
+        self.amActive = True
+
+        # refresh scene exceptions too
+        scene_exceptions.retrieve_exceptions()
+
+        # refresh network timezones
+        network_timezones.update_network_dict()
+
+        # sure, why not?
+        if sickbeard.USE_FAILED_DOWNLOADS:
+            failed_history.trimHistory()
+
+        self.amActive = False
@@ -11,7 +11,7 @@
 # SickRage is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU General Public License
 # along with SickRage. If not, see <http://www.gnu.org/licenses/>.
@@ -37,6 +37,7 @@ DAILY_SEARCH = 20
 FAILED_SEARCH = 30
 MANUAL_SEARCH = 30

+
 class SearchQueue(generic_queue.GenericQueue):
     def __init__(self):
         generic_queue.GenericQueue.__init__(self)
@@ -83,6 +84,7 @@ class SearchQueue(generic_queue.GenericQueue):
         else:
             logger.log(u"Not adding item, it's already in the queue", logger.DEBUG)

+
 class DailySearchQueueItem(generic_queue.QueueItem):
     def __init__(self, show, segment):
         generic_queue.QueueItem.__init__(self, 'Daily Search', DAILY_SEARCH)
@@ -113,6 +115,7 @@ class DailySearchQueueItem(generic_queue.QueueItem):

         generic_queue.QueueItem.finish(self)

+
 class ManualSearchQueueItem(generic_queue.QueueItem):
     def __init__(self, show, segment):
         generic_queue.QueueItem.__init__(self, 'Manual Search', MANUAL_SEARCH)
@@ -155,6 +158,7 @@ class ManualSearchQueueItem(generic_queue.QueueItem):
             self.success = False
             generic_queue.QueueItem.finish(self)

+
 class BacklogQueueItem(generic_queue.QueueItem):
     def __init__(self, show, segment):
         generic_queue.QueueItem.__init__(self, 'Backlog', BACKLOG_SEARCH)
@@ -168,7 +172,8 @@ class BacklogQueueItem(generic_queue.QueueItem):
         generic_queue.QueueItem.execute(self)

         for season in self.segment:
-            sickbeard.searchBacklog.BacklogSearcher.currentSearchInfo = {'title': self.show.name + " Season " + str(season)}
+            sickbeard.searchBacklog.BacklogSearcher.currentSearchInfo = {
+                'title': self.show.name + " Season " + str(season)}

             wantedEps = self.segment[season]

@@ -196,6 +201,7 @@ class BacklogQueueItem(generic_queue.QueueItem):

         self.finish()

+
 class FailedQueueItem(generic_queue.QueueItem):
     def __init__(self, show, segment):
         generic_queue.QueueItem.__init__(self, 'Retry', FAILED_SEARCH)
@@ -27,7 +27,8 @@ from sickbeard import ui
 from sickbeard.exceptions import ex
 from sickbeard import encodingKludge as ek
 from sickbeard import db
+from sickbeard import network_timezones
+from sickbeard import failed_history

 class ShowUpdater():
     def __init__(self):
@@ -261,7 +261,7 @@ class TVCache():
         return True

     def _addCacheEntry(self, name, url, quality=None):
-        # if we don't have complete info then parse the filename to get it
         try:
             myParser = NameParser()
             parse_result = myParser.parse(name).convert()
@@ -269,32 +269,11 @@
             logger.log(u"Unable to parse the filename " + name + " into a valid episode", logger.DEBUG)
             return None

-        if not parse_result:
-            logger.log(u"Giving up because I'm unable to parse this name: " + name, logger.DEBUG)
+        if not parse_result or not parse_result.series_name:
             return None

-        if not parse_result.series_name:
-            logger.log(u"No series name retrieved from " + name + ", unable to cache it", logger.DEBUG)
-            return None
-
-        showObj = None
-        if parse_result.show:
-            showObj = parse_result.show
-
-        if not showObj:
-            showResult = helpers.searchDBForShow(parse_result.series_name)
-            if showResult:
-                showObj = helpers.findCertainShow(sickbeard.showList, int(showResult[0]))
-
-        if not showObj:
-            for curShow in sickbeard.showList:
-                if show_name_helpers.isGoodResult(name, curShow, False):
-                    showObj = curShow
-                    break
-
-        if not showObj:
+        if not parse_result.show:
             logger.log(u"No match for show: [" + parse_result.series_name + "], not caching ...", logger.DEBUG)
-            sickbeard.name_cache.addNameToCache(parse_result.series_name, 0)
             return None

         season = episodes = None
@@ -304,7 +283,7 @@
             airdate = parse_result.air_date.toordinal() or parse_result.sports_event_date.toordinal()
             sql_results = myDB.select(
                 "SELECT season, episode FROM tv_episodes WHERE showid = ? AND indexer = ? AND airdate = ?",
-                [showObj.indexerid, showObj.indexer, airdate])
+                [parse_result.show.indexerid, parse_result.show.indexer, airdate])
             if sql_results > 0:
                 season = int(sql_results[0]["season"])
                 episodes = [int(sql_results[0]["episode"])]
@@ -330,7 +309,7 @@

         return [
             "INSERT INTO [" + self.providerID + "] (name, season, episodes, indexerid, url, time, quality) VALUES (?,?,?,?,?,?,?)",
-            [name, season, episodeText, showObj.indexerid, url, curTimestamp, quality]]
+            [name, season, episodeText, parse_result.show.indexerid, url, curTimestamp, quality]]


     def searchCache(self, episodes, manualSearch=False):
@@ -33,12 +33,8 @@ import sickbeard
 from sickbeard import helpers
 from sickbeard import version, ui
 from sickbeard import logger
-from sickbeard import scene_exceptions
 from sickbeard.exceptions import ex
 from sickbeard import encodingKludge as ek
-from sickbeard import failed_history
-from sickbeard import network_timezones
-

 class CheckVersion():
     """
@@ -69,17 +65,6 @@ class CheckVersion():
             # do a soft restart
             threading.Timer(2, sickbeard.invoke_restart, [False]).start()

-        if not updated:
-            # refresh scene exceptions too
-            scene_exceptions.retrieve_exceptions()
-
-            # refresh network timezones
-            network_timezones.update_network_dict()
-
-            # sure, why not?
-            if sickbeard.USE_FAILED_DOWNLOADS:
-                failed_history.trimHistory()
-
     def find_install_type(self):
         """
         Determines how this copy of SB was installed.
@@ -205,7 +205,7 @@ class ManageSearches:
         #t.backlogPI = sickbeard.backlogSearchScheduler.action.getProgressIndicator()
         t.backlogPaused = sickbeard.searchQueueScheduler.action.is_backlog_paused()  # @UndefinedVariable
         t.backlogRunning = sickbeard.searchQueueScheduler.action.is_backlog_in_progress()  # @UndefinedVariable
-        t.dailySearchStatus = sickbeard.searchQueueScheduler.action.is_dailysearch_in_progress()  # @UndefinedVariable
+        t.dailySearchStatus = sickbeard.searchQueueScheduler.action.amActive  # @UndefinedVariable
         t.findPropersStatus = sickbeard.properFinderScheduler.action.amActive  # @UndefinedVariable

         t.submenu = ManageMenu()