mirror of https://github.com/SickGear/SickGear.git
synced 2024-12-02 17:33:37 +00:00

commit e977750702 — Merge branch 'origin/master'
Conflicts: sickbeard/__init__.py

28 changed files with 367 additions and 402 deletions
@@ -13,9 +13,7 @@
 #set $numGoodShows = len([x for x in $sickbeard.showList if x.paused == 0 and x.status != "Ended"])
 #set $numDLEpisodes = $myDB.select("SELECT COUNT(*) FROM tv_episodes WHERE status IN ("+",".join([str(x) for x in $Quality.DOWNLOADED + [$ARCHIVED]])+") AND season != 0 and episode != 0 AND airdate <= "+$today+"")[0][0]
 #set $numEpisodes = $myDB.select("SELECT COUNT(*) FROM tv_episodes WHERE season != 0 and episode != 0 AND (airdate != 1 OR status IN ("+",".join([str(x) for x in ($Quality.DOWNLOADED + $Quality.SNATCHED + $Quality.SNATCHED_PROPER) + [$ARCHIVED]])+")) AND airdate <= "+$today+" AND status != "+str($IGNORED)+"")[0][0]
 <b>$numShows shows</b> ($numGoodShows active) | <b>$numDLEpisodes/$numEpisodes</b> episodes downloaded |
-<b>Search</b>: <%=str(sickbeard.currentSearchScheduler.timeLeft()).split('.')[0]%> |
-<!--<b>Update</b>: <%=str(sickbeard.updateScheduler.timeLeft()).split('.')[0]%> | //-->
 <b>Backlog</b>: $sbdatetime.sbdatetime.sbfdate($sickbeard.backlogSearchScheduler.nextRun())
 </div>
 <ul style="float:right;">
@@ -86,7 +86,7 @@ def _worker(executor_reference, work_queue):
             _base.LOGGER.critical('Exception in worker', exc_info=True)


 class ThreadPoolExecutor(_base.Executor):
-    def __init__(self, max_workers, name=None):
+    def __init__(self, max_workers):
         """Initializes a new ThreadPoolExecutor instance.

         Args:
@@ -98,7 +98,6 @@ class ThreadPoolExecutor(_base.Executor):
         self._threads = set()
         self._shutdown = False
         self._shutdown_lock = threading.Lock()
-        self._name = name

     def submit(self, fn, *args, **kwargs):
         with self._shutdown_lock:
@@ -109,11 +108,16 @@ class ThreadPoolExecutor(_base.Executor):
             w = _WorkItem(f, fn, args, kwargs)

             self._work_queue.put(w)
-            self._adjust_thread_count()
+
+            name = None
+            if kwargs.has_key('name'):
+                name = kwargs.pop('name')
+
+            self._adjust_thread_count(name)
             return f
     submit.__doc__ = _base.Executor.submit.__doc__

-    def _adjust_thread_count(self):
+    def _adjust_thread_count(self, name=None):
         # When the executor gets lost, the weakref callback will wake up
         # the worker threads.
         def weakref_cb(_, q=self._work_queue):
@@ -124,8 +128,8 @@ class ThreadPoolExecutor(_base.Executor):
             t = threading.Thread(target=_worker,
                                  args=(weakref.ref(self, weakref_cb),
                                        self._work_queue),)
-            if self._name:
-                t.name = self._name
+            if name:
+                t.name = name
             t.daemon = True
             t.start()
             self._threads.add(t)
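With this change the executor no longer carries a single fixed thread name; each submit() call can name the worker spawned for its task. A minimal sketch of the intended call pattern against the patched vendored backport (the task function and thread name here are illustrative):

from lib.concurrent.futures.thread import ThreadPoolExecutor

def run_search(provider):
    # placeholder task; in SickGear this is a queue item's execute()
    return 'searched ' + provider

executor = ThreadPoolExecutor(max_workers=4)
# the patched submit() pops 'name' from kwargs and applies it to the
# worker thread created for this task; remaining kwargs reach run_search
future = executor.submit(run_search, 'btn', name='SEARCHQUEUE-BTN')
print(future.result())

One caveat worth noting: in the diff as shown, the pop happens after the work item has been built and queued, so a fast worker could still see the extra 'name' kwarg; popping before constructing the _WorkItem would close that race.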
@@ -33,7 +33,7 @@ from sickbeard import providers, metadata, config
 from providers import ezrss, tvtorrents, btn, newznab, womble, thepiratebay, torrentleech, kat, publichd, iptorrents, \
     omgwtfnzbs, scc, hdtorrents, torrentday, hdbits, nextgen, speedcd
 from sickbeard.config import CheckSection, check_setting_int, check_setting_str, ConfigMigrator, naming_ep_type
-from sickbeard import searchCurrent, searchBacklog, showUpdater, versionChecker, properFinder, autoPostProcesser, \
+from sickbeard import searchBacklog, showUpdater, versionChecker, properFinder, autoPostProcesser, \
     subtitles, traktWatchListChecker
 from sickbeard import helpers, db, exceptions, show_queue, search_queue, scheduler, show_name_helpers
 from sickbeard import logger
@@ -74,7 +74,6 @@ PIDFILE = ''
 DAEMON = None

 backlogSearchScheduler = None
-currentSearchScheduler = None
 showUpdateScheduler = None
 versionCheckScheduler = None
 showQueueScheduler = None
@@ -489,7 +488,7 @@ def initialize(consoleLogging=True):
     global ACTUAL_LOG_DIR, LOG_DIR, WEB_PORT, WEB_LOG, ENCRYPTION_VERSION, WEB_ROOT, WEB_USERNAME, WEB_PASSWORD, WEB_HOST, WEB_IPV6, USE_API, API_KEY, ENABLE_HTTPS, HTTPS_CERT, HTTPS_KEY, \
         HANDLE_REVERSE_PROXY, USE_NZBS, USE_TORRENTS, NZB_METHOD, NZB_DIR, DOWNLOAD_PROPERS, PREFER_EPISODE_RELEASES, ALLOW_HIGH_PRIORITY, TORRENT_METHOD, \
         SAB_USERNAME, SAB_PASSWORD, SAB_APIKEY, SAB_CATEGORY, SAB_HOST, \
-        NZBGET_USERNAME, NZBGET_PASSWORD, NZBGET_CATEGORY, NZBGET_HOST, NZBGET_USE_HTTPS, currentSearchScheduler, backlogSearchScheduler, \
+        NZBGET_USERNAME, NZBGET_PASSWORD, NZBGET_CATEGORY, NZBGET_HOST, NZBGET_USE_HTTPS, backlogSearchScheduler, \
         TORRENT_USERNAME, TORRENT_PASSWORD, TORRENT_HOST, TORRENT_PATH, TORRENT_RATIO, TORRENT_SEED_TIME, TORRENT_PAUSED, TORRENT_HIGH_BANDWIDTH, TORRENT_LABEL, TORRENT_VERIFY_CERT, \
         USE_XBMC, XBMC_ALWAYS_ON, XBMC_NOTIFY_ONSNATCH, XBMC_NOTIFY_ONDOWNLOAD, XBMC_NOTIFY_ONSUBTITLEDOWNLOAD, XBMC_UPDATE_FULL, XBMC_UPDATE_ONLYFIRST, \
         XBMC_UPDATE_LIBRARY, XBMC_HOST, XBMC_USERNAME, XBMC_PASSWORD, \
@@ -1049,12 +1048,6 @@ def initialize(consoleLogging=True):
         newznabProviderList = providers.getNewznabProviderList(NEWZNAB_DATA)
         providerList = providers.makeProviderList()

-        # initialize newznab providers
-        currentSearchScheduler = scheduler.Scheduler(searchCurrent.CurrentSearcher(),
-                                                     cycleTime=datetime.timedelta(minutes=SEARCH_FREQUENCY),
-                                                     threadName="SEARCH",
-                                                     runImmediately=True)
-
         # the interval for this is stored inside the ShowUpdater class
         showUpdaterInstance = showUpdater.ShowUpdater()
         showUpdateScheduler = scheduler.Scheduler(showUpdaterInstance,
@@ -1070,7 +1063,8 @@ def initialize(consoleLogging=True):
         showQueueScheduler = scheduler.Scheduler(show_queue.ShowQueue(),
                                                  cycleTime=datetime.timedelta(seconds=3),
                                                  threadName="SHOWQUEUE",
-                                                 silent=True)
+                                                 silent=True,
+                                                 runImmediately=True)

         searchQueueScheduler = scheduler.Scheduler(search_queue.SearchQueue(),
                                                    cycleTime=datetime.timedelta(seconds=3),
@@ -1125,7 +1119,7 @@ def initialize(consoleLogging=True):


 def start():
-    global __INITIALIZED__, currentSearchScheduler, backlogSearchScheduler, \
+    global __INITIALIZED__, backlogSearchScheduler, \
         showUpdateScheduler, versionCheckScheduler, showQueueScheduler, \
         properFinderScheduler, autoPostProcesserScheduler, searchQueueScheduler, \
         subtitlesFinderScheduler, started, USE_SUBTITLES, \
@@ -1135,8 +1129,11 @@ def start():

         if __INITIALIZED__:

-            # start the search scheduler
-            currentSearchScheduler.thread.start()
+            # start the queue checker
+            showQueueScheduler.thread.start()
+
+            # start the version checker
+            versionCheckScheduler.thread.start()

             # start the backlog scheduler
             backlogSearchScheduler.thread.start()
@@ -1144,12 +1141,6 @@ def start():
             # start the show updater
             showUpdateScheduler.thread.start()

-            # start the version checker
-            versionCheckScheduler.thread.start()
-
-            # start the queue checker
-            showQueueScheduler.thread.start()
-
             # start the search queue checker
             searchQueueScheduler.thread.start()
@@ -1170,7 +1161,7 @@ def start():


 def halt():
-    global __INITIALIZED__, currentSearchScheduler, backlogSearchScheduler, showUpdateScheduler, \
+    global __INITIALIZED__, backlogSearchScheduler, showUpdateScheduler, \
         showQueueScheduler, properFinderScheduler, autoPostProcesserScheduler, searchQueueScheduler, \
         subtitlesFinderScheduler, started, \
         traktWatchListCheckerSchedular
@@ -1183,13 +1174,6 @@ def halt():

             # abort all the threads

-            currentSearchScheduler.abort = True
-            logger.log(u"Waiting for the SEARCH thread to exit")
-            try:
-                currentSearchScheduler.thread.join(10)
-            except:
-                pass
-
             backlogSearchScheduler.abort = True
             logger.log(u"Waiting for the BACKLOG thread to exit")
             try:
@@ -163,7 +163,6 @@ def change_SEARCH_FREQUENCY(freq):
     if sickbeard.SEARCH_FREQUENCY < sickbeard.MIN_SEARCH_FREQUENCY:
         sickbeard.SEARCH_FREQUENCY = sickbeard.MIN_SEARCH_FREQUENCY

-    sickbeard.currentSearchScheduler.cycleTime = datetime.timedelta(minutes=sickbeard.SEARCH_FREQUENCY)
     sickbeard.backlogSearchScheduler.cycleTime = datetime.timedelta(minutes=sickbeard.get_backlog_cycle_time())

 def change_UPDATE_FREQUENCY(freq):
@@ -57,6 +57,8 @@ class DBConnection:
         self.connection.row_factory = sqlite3.Row

     def checkDBVersion(self):
+        result = None
+
         try:
             result = self.select("SELECT db_version FROM db_version")
         except sqlite3.OperationalError, e:
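Pre-seeding result = None matters because the SELECT raises on a fresh database before result is ever assigned; code after the try block would then hit an UnboundLocalError instead of a clean fallback. A small, runnable illustration of the failure mode being fixed (in-memory database and fallback value are illustrative):

import sqlite3

connection = sqlite3.connect(':memory:')
result = None  # pre-seed so the name exists even when the query fails

try:
    result = connection.execute('SELECT db_version FROM db_version').fetchall()
except sqlite3.OperationalError:
    pass  # fresh database: the db_version table does not exist yet

print(result[0][0] if result else 0)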
@@ -20,8 +20,12 @@ import datetime
 import threading
 import Queue

+import sickbeard
+from lib.concurrent.futures.thread import ThreadPoolExecutor
+
 from sickbeard import logger


 class QueuePriorities:
     LOW = 10
     NORMAL = 20
@@ -29,11 +33,12 @@ class QueuePriorities:

 class GenericQueue:
     def __init__(self):
+        #self.executor = ThreadPoolExecutor(sickbeard.NUM_OF_THREADS)
         self.currentItem = None
         self.thread = None
         self.queue_name = "QUEUE"
         self.min_priority = 0
-        self.queue = Queue.PriorityQueue()
+        self.queue = Queue.Queue()

     def pause(self):
         logger.log(u"Pausing queue")
@@ -45,7 +50,7 @@ class GenericQueue:

     def add_item(self, item):
         item.added = datetime.datetime.now()
-        self.queue.put(item, item.priority)
+        self.queue.put(item)
         return item

     def run(self, queue=None):
@@ -67,12 +72,10 @@ class GenericQueue:
             return

         threadName = self.queue_name + '-' + queueItem.get_thread_name()
-        self.thread = threading.Thread(None, queueItem.execute, threadName)
-        self.thread.start()
+        executor = ThreadPoolExecutor(sickbeard.NUM_OF_THREADS)
+        self.thread = executor.submit(queueItem.execute, name=threadName)

         self.currentItem = queueItem


 class QueueItem:
     def __init__(self, name, action_id=0):
         self.name = name
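self.thread now holds a futures Future rather than a threading.Thread, so any caller that used isAlive()/join() needs the Future idioms instead. A hedged sketch of the difference (the executor and task are illustrative; the commented-out self.executor in __init__ above suggests a shared pool was the eventual goal rather than one executor per item):

from lib.concurrent.futures.thread import ThreadPoolExecutor

def execute():
    return 'done'

executor = ThreadPoolExecutor(max_workers=1)
future = executor.submit(execute, name='SHOWQUEUE-1234')

# a Future is polled with done() and drained with result(); result()
# blocks like Thread.join() but also re-raises exceptions from execute()
if not future.done():
    print(future.result())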
@@ -213,7 +213,6 @@ class NameParser(object):
     i = result = 0
     for integer, numeral in numeral_map:
         while n[i:i + len(numeral)] == numeral:
-            time.sleep(1)
             result += integer
             i += len(numeral)
@@ -37,7 +37,11 @@ class PLEXNotifier(XBMCNotifier):
     def _notify_pmc(self, message, title="Sick Beard", host=None, username=None, password=None, force=False):
         # fill in omitted parameters
         if not host:
-            host = sickbeard.PLEX_HOST
+            if sickbeard.PLEX_HOST:
+                host = sickbeard.PLEX_HOST  # Use the default Plex host
+            else:
+                logger.log(u"No Plex host specified, check your settings", logger.DEBUG)
+                return False
         if not username:
             username = sickbeard.PLEX_USERNAME
         if not password:
@@ -329,9 +329,8 @@ class BTNCache(tvcache.TVCache):
             if ci is not None:
                 cl.append(ci)

-        if len(cl) > 0:
-            myDB = self._getDB()
-            myDB.mass_action(cl)
+        myDB = self._getDB()
+        myDB.mass_action(cl)

         else:
             raise AuthException(
@@ -236,12 +236,10 @@ class GenericProvider:
         searchItems = {}
         itemList = []

-        if manualSearch:
+        if not manualSearch:
             self.cache.updateCache()

         for epObj in episodes:
-
-
             cacheResult = self.cache.searchCache(epObj, manualSearch)
             if len(cacheResult):
                 results.update(cacheResult)
@@ -358,9 +358,8 @@ class HDTorrentsCache(tvcache.TVCache):
             if ci is not None:
                 cl.append(ci)

-        if len(cl) > 0:
-            myDB = self._getDB()
-            myDB.mass_action(cl)
+        myDB = self._getDB()
+        myDB.mass_action(cl)

     def _parseItem(self, item):

@@ -369,7 +368,7 @@ class HDTorrentsCache(tvcache.TVCache):
         if not title or not url:
             return None

-        logger.log(u"Attempting to cache item:" + title, logger.DEBUG)
+        logger.log(u"Attempting to cache item:[" + title +"]", logger.DEBUG)

         return self._addCacheEntry(title, url)
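The same two-line change repeats across every provider cache below: the len(cl) > 0 guard is dropped and mass_action is handed the list unconditionally. That is safe as long as mass_action simply iterates its argument, which a reduced model makes clear (mass_action here is a stand-in for db.DBConnection.mass_action):

def mass_action(querylist):
    # stand-in for db.DBConnection.mass_action: iterating an empty
    # list executes nothing, so the caller-side guard is redundant
    for query in querylist:
        print('executing %r' % (query,))

cl = []          # no cache items parsed this pass
mass_action(cl)  # no-op, exactly as if the old guard had skipped it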
@@ -304,9 +304,8 @@ class IPTorrentsCache(tvcache.TVCache):
             if ci is not None:
                 cl.append(ci)

-        if len(cl) > 0:
-            myDB = self._getDB()
-            myDB.mass_action(cl)
+        myDB = self._getDB()
+        myDB.mass_action(cl)

     def _parseItem(self, item):

@@ -315,7 +314,7 @@ class IPTorrentsCache(tvcache.TVCache):
         if not title or not url:
             return None

-        logger.log(u"Attempting to cache item:" + title, logger.DEBUG)
+        logger.log(u"Attempting to cache item:[" + title +"]", logger.DEBUG)

         return self._addCacheEntry(title, url)
@@ -427,15 +427,13 @@ class KATCache(tvcache.TVCache):

         cl = []
         for result in rss_results:
-
             item = (result[0], result[1])
             ci = self._parseItem(item)
             if ci is not None:
                 cl.append(ci)

-        if len(cl) > 0:
-            myDB = self._getDB()
-            myDB.mass_action(cl)
+        myDB = self._getDB()
+        myDB.mass_action(cl)

     def _parseItem(self, item):

@@ -444,7 +442,7 @@ class KATCache(tvcache.TVCache):
         if not title or not url:
             return None

-        logger.log(u"Attempting to cache item:" + title, logger.DEBUG)
+        logger.log(u"Attempting to cache item:[" + title +"]", logger.DEBUG)

         return self._addCacheEntry(title, url)
@@ -272,3 +272,59 @@ class NewznabCache(tvcache.TVCache):

     def _checkAuth(self, data):
         return self.provider._checkAuthFromData(data)
+
+    def updateCache(self):
+        if not self.shouldUpdate():
+            return
+
+        if self._checkAuth(None):
+            data = self._getRSSData()
+
+            # as long as the http request worked we count this as an update
+            if data:
+                self.setLastUpdate()
+            else:
+                return []
+
+            # now that we've loaded the current RSS feed lets delete the old cache
+            logger.log(u"Clearing " + self.provider.name + " cache and updating with new information")
+            self._clearCache()
+
+            if self._checkAuth(data):
+                items = data.entries
+                ql = []
+                for item in items:
+                    ci = self._parseItem(item)
+                    if ci is not None:
+                        ql.append(ci)
+
+                myDB = self._getDB()
+                myDB.mass_action(ql)
+
+            else:
+                raise AuthException(
+                    u"Your authentication credentials for " + self.provider.name + " are incorrect, check your config")
+
+        return []
+
+    # overwrite method with that parses the rageid from the newznab feed
+    def _parseItem(self, item):
+        title = item.title
+        url = item.link
+
+        self._checkItemAuth(title, url)
+
+        if not title or not url:
+            logger.log(
+                u"The data returned from the " + self.provider.name + " feed is incomplete, this result is unusable",
+                logger.DEBUG)
+            return None
+
+        url = self._translateLinkURL(url)
+
+        logger.log(u"Adding item from RSS to cache: " + title, logger.DEBUG)
+
+        return self._addCacheEntry(title, url)
@@ -353,9 +353,8 @@ class NextGenCache(tvcache.TVCache):
             if ci is not None:
                 cl.append(ci)

-        if len(cl) > 0:
-            myDB = self._getDB()
-            myDB.mass_action(cl)
+        myDB = self._getDB()
+        myDB.mass_action(cl)

     def _parseItem(self, item):

@@ -364,7 +363,7 @@ class NextGenCache(tvcache.TVCache):
         if not title or not url:
             return None

-        logger.log(u"Attempting to cache item:" + title, logger.DEBUG)
+        logger.log(u"Attempting to cache item:[" + title +"]", logger.DEBUG)

         return self._addCacheEntry(title, url)
@@ -335,7 +335,7 @@ class PublicHDCache(tvcache.TVCache):
         if not title or not url:
             return None

-        logger.log(u"Attempting to cache item:" + title, logger.DEBUG)
+        logger.log(u"Attempting to cache item:[" + title +"]", logger.DEBUG)

         return self._addCacheEntry(title, url)
@@ -343,9 +343,8 @@ class SCCCache(tvcache.TVCache):
             if ci is not None:
                 cl.append(ci)

-        if len(cl) > 0:
-            myDB = self._getDB()
-            myDB.mass_action(cl)
+        myDB = self._getDB()
+        myDB.mass_action(cl)

     def _parseItem(self, item):

@@ -354,7 +353,7 @@ class SCCCache(tvcache.TVCache):
         if not title or not url:
             return None

-        logger.log(u"Attempting to cache item:" + title, logger.DEBUG)
+        logger.log(u"Attempting to cache item:[" + title +"]", logger.DEBUG)

         return self._addCacheEntry(title, url)
@@ -293,7 +293,7 @@ class SpeedCDCache(tvcache.TVCache):
         if not title or not url:
             return None

-        logger.log(u"Attempting to cache item:" + title, logger.DEBUG)
+        logger.log(u"Attempting to cache item:[" + title +"]", logger.DEBUG)

         return self._addCacheEntry(title, url)
@@ -424,9 +424,8 @@ class ThePirateBayCache(tvcache.TVCache):
             if ci is not None:
                 cl.append(ci)

-        if len(cl) > 0:
-            myDB = self._getDB()
-            myDB.mass_action(cl)
+        myDB = self._getDB()
+        myDB.mass_action(cl)

     def _parseItem(self, item):

@@ -435,7 +434,7 @@ class ThePirateBayCache(tvcache.TVCache):
         if not title or not url:
             return None

-        logger.log(u"Attempting to cache item:" + title, logger.DEBUG)
+        logger.log(u"Attempting to cache item:[" + title +"]", logger.DEBUG)

         return self._addCacheEntry(title, url)
@@ -305,9 +305,8 @@ class TorrentDayCache(tvcache.TVCache):
             if ci is not None:
                 cl.append(ci)

-        if len(cl) > 0:
-            myDB = self._getDB()
-            myDB.mass_action(cl)
+        myDB = self._getDB()
+        myDB.mass_action(cl)

     def _parseItem(self, item):

@@ -316,7 +315,7 @@ class TorrentDayCache(tvcache.TVCache):
         if not title or not url:
             return None

-        logger.log(u"Attempting to cache item:" + title, logger.DEBUG)
+        logger.log(u"Attempting to cache item:[" + title +"]", logger.DEBUG)

         return self._addCacheEntry(title, url)
@@ -304,9 +304,8 @@ class TorrentLeechCache(tvcache.TVCache):
             if ci is not None:
                 cl.append(ci)

-        if len(cl) > 0:
-            myDB = self._getDB()
-            myDB.mass_action(cl)
+        myDB = self._getDB()
+        myDB.mass_action(cl)

     def _parseItem(self, item):

@@ -315,7 +314,7 @@ class TorrentLeechCache(tvcache.TVCache):
         if not title or not url:
             return None

-        logger.log(u"Attempting to cache item:" + title, logger.DEBUG)
+        logger.log(u"Attempting to cache item:[" + title +"]", logger.DEBUG)

         return self._addCacheEntry(title, url)
@@ -369,9 +369,6 @@ def searchProviders(queueItem, show, season, episodes, curProvider, seasonSearch
     foundResults = {}
     finalResults = []

-    if manualSearch:
-        curProvider.cache.updateCache()
-
     # convert indexer numbering to scene numbering for searches
     map(lambda x: x.convertToSceneNumbering, episodes)

@@ -406,7 +403,6 @@ def searchProviders(queueItem, show, season, episodes, curProvider, seasonSearch
     highest_quality_overall = 0
     for cur_episode in foundResults:
         for cur_result in foundResults[cur_episode]:
-            cur_result.queue_item = queueItem
             if cur_result.quality != Quality.UNKNOWN and cur_result.quality > highest_quality_overall:
                 highest_quality_overall = cur_result.quality
     logger.log(u"The highest quality of any match is " + Quality.qualityStrings[highest_quality_overall], logger.DEBUG)

@@ -574,4 +570,5 @@ def searchProviders(queueItem, show, season, episodes, curProvider, seasonSearch

     finalResults.append(pickBestResult(foundResults[curEp], show))

-    return finalResults
+    queueItem.results = finalResults
+    return queueItem
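searchProviders now attaches its results to the queue item and returns the item itself, rather than returning a bare list. A reduced, runnable model of the new contract (QueueItem and the result string are illustrative stand-ins):

class QueueItem(object):
    def __init__(self):
        self.results = []

def searchProviders(queueItem, finalResults):
    # reduced model of the change: results ride back on the queue item
    queueItem.results = finalResults
    return queueItem

item = searchProviders(QueueItem(), ['Show.S01E01.720p.HDTV'])
print(item.results)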
@@ -22,113 +22,43 @@ import datetime
 import Queue
 import time
 import traceback
+import threading

 import sickbeard
 from sickbeard import db, logger, common, exceptions, helpers
 from sickbeard import generic_queue, scheduler
 from sickbeard import search, failed_history, history
 from sickbeard import ui
-from lib.concurrent import futures
+from sickbeard.snatch_queue import SnatchQueue
+from lib.concurrent.futures import as_completed
 from lib.concurrent.futures.thread import ThreadPoolExecutor

+search_queue_lock = threading.Lock()
+
 BACKLOG_SEARCH = 10
 RSS_SEARCH = 20
 FAILED_SEARCH = 30
 MANUAL_SEARCH = 30
 SNATCH = 40

-# snatch queues
-ManualSnatchQueue = Queue.PriorityQueue()
-RSSSnatchQueue = Queue.PriorityQueue()
-BacklogSnatchQueue = Queue.PriorityQueue()
-FailedSnatchQueue = Queue.PriorityQueue()
-
-SearchItemQueue = Queue.PriorityQueue()
-
-
-class SnatchQueue(generic_queue.GenericQueue):
-    def __init__(self):
-        generic_queue.GenericQueue.__init__(self)
-        self.queue_name = "SNATCHQUEUE"
-
-    def is_in_queue(self, show, episodes, quality):
-        for cur_item in self.queue.queue:
-            if cur_item.results.extraInfo[0] == show \
-                    and cur_item.results.episodes.sort() == episodes.sort() \
-                    and cur_item.results.quality >= quality:
-                return True
-        return False
-
-    def add_item(self, item):
-        # dynamically select our snatch queue
-        if item.type == 'RSSSearchQueueItem':
-            self.queue = RSSSnatchQueue
-        elif item.type == 'ManualSearchQueueItem':
-            self.queue = ManualSnatchQueue
-        elif item.type == 'BacklogQueueItem':
-            self.queue = BacklogSnatchQueue
-        elif item.type == 'FailedQueueItem':
-            self.queue = FailedSnatchQueue
-        else:
-            return
-
-        # check if we already have a item ready to snatch with same or better quality score
-        if not self.is_in_queue(item.results.extraInfo[0], item.results.episodes, item.results.quality):
-            generic_queue.GenericQueue.add_item(self, item)
-        else:
-            logger.log(
-                u"Not adding item [" + item.results.name + "] it's already in the queue with same or higher quality",
-                logger.DEBUG)
-
-
-class SnatchQueueItem(generic_queue.QueueItem):
-    def __init__(self, results, queue_item):
-        generic_queue.QueueItem.__init__(self, 'Snatch', SNATCH)
-        self.priority = generic_queue.QueuePriorities.HIGH
-        self.thread_name = 'SNATCH-' + str(results.extraInfo[0].indexerid)
-        self.results = results
-        self.success = None
-        self.queue_item = queue_item
-        self.type = queue_item.type
-
-    def execute(self):
-        generic_queue.QueueItem.execute(self)
-
-        # just use the first result for now
-        logger.log(u"Downloading " + self.results.name + " from " + self.results.provider.name)
-
-        result = search.snatchEpisode(self.results)
-
-        if self.type == "ManualSearchQueueItem":
-            providerModule = self.results.provider
-            if not result:
-                ui.notifications.error(
-                    'Error while attempting to snatch ' + self.results.name + ', check your logs')
-            elif providerModule == None:
-                ui.notifications.error('Provider is configured incorrectly, unable to download')
-
-        self.success = result
-        self.queue_item.success = result
-
-        generic_queue.QueueItem.finish(self.queue_item)
-        generic_queue.QueueItem.finish(self)
-
 class SearchQueue(generic_queue.GenericQueue):
     def __init__(self):
         generic_queue.GenericQueue.__init__(self)
         self.queue_name = "SEARCHQUEUE"
-        self.queue = SearchItemQueue

     def is_in_queue(self, show, segment):
         for cur_item in self.queue.queue:
-            if isinstance(cur_item, BacklogQueueItem) and cur_item.show == show and cur_item.segment == segment:
-                return True
+            with search_queue_lock:
+                if isinstance(cur_item, BacklogQueueItem) and cur_item.show == show and cur_item.segment == segment:
+                    return True
         return False

     def is_ep_in_queue(self, ep_obj):
         for cur_item in self.queue.queue:
-            if isinstance(cur_item, ManualSearchQueueItem) and cur_item.ep_obj == ep_obj:
-                return True
+            with search_queue_lock:
+                if isinstance(cur_item, ManualSearchQueueItem) and cur_item.ep_obj == ep_obj:
+                    return True
         return False

     def pause_backlog(self):
@@ -143,15 +73,14 @@ class SearchQueue(generic_queue.GenericQueue):

     def is_backlog_in_progress(self):
         for cur_item in self.queue.queue + [self.currentItem]:
-            if isinstance(cur_item, BacklogQueueItem):
-                return True
+            with search_queue_lock:
+                if isinstance(cur_item, BacklogQueueItem):
+                    return True
         return False

     def add_item(self, item):

-        if isinstance(item, RSSSearchQueueItem):
-            generic_queue.GenericQueue.add_item(self, item)
-        elif isinstance(item, BacklogQueueItem) and not self.is_in_queue(item.show, item.segment):
+        if isinstance(item, BacklogQueueItem) and not self.is_in_queue(item.show, item.segment):
             generic_queue.GenericQueue.add_item(self, item)
         elif isinstance(item, ManualSearchQueueItem) and not self.is_ep_in_queue(item.ep_obj):
             generic_queue.GenericQueue.add_item(self, item)
@@ -165,7 +94,6 @@ class ManualSearchQueueItem(generic_queue.QueueItem):
     def __init__(self, ep_obj):
         generic_queue.QueueItem.__init__(self, 'Manual Search', MANUAL_SEARCH)
         self.priority = generic_queue.QueuePriorities.HIGH
-        self.type = self.__class__.__name__
         self.thread_name = 'MANUAL-' + str(ep_obj.show.indexerid)
         self.success = None
         self.show = ep_obj.show
@@ -174,113 +102,49 @@ class ManualSearchQueueItem(generic_queue.QueueItem):
     def execute(self):
         generic_queue.QueueItem.execute(self)

-        fs = []
         didSearch = False

         providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive()]
         try:
-            with ThreadPoolExecutor(sickbeard.NUM_OF_THREADS) as executor:
-                for provider in providers:
-                    didSearch = True
-                    logger.log("Beginning manual search for [" + self.ep_obj.prettyName() + "] on " + provider.name)
-                    executor.submit(
-                        search.searchProviders, self, self.show, self.ep_obj.season, [self.ep_obj], provider, False,
-                        True).add_done_callback(snatch_results)
-                executor.shutdown(wait=True)
-        except Exception, e:
+            for provider in providers:
+                logger.log("Beginning manual search for [" + self.ep_obj.prettyName() + "] on " + provider.name)
+                searchResult = search.searchProviders(self, self.show, self.ep_obj.season, [self.ep_obj], provider,
+                                                      False,
+                                                      True)
+
+                didSearch = True
+                if searchResult:
+                    self.success = SnatchQueue().process_results(searchResult)
+                    if self.success:
+                        break
+
+        except Exception:
             logger.log(traceback.format_exc(), logger.DEBUG)
+            stop = True

         if not didSearch:
             logger.log(
                 u"No NZB/Torrent providers found or enabled in your SickRage config. Please check your settings.",
                 logger.ERROR)

-        if ManualSnatchQueue.empty():
+        if not self.success:
             ui.notifications.message('No downloads were found',
                                      "Couldn't find a download for <i>%s</i>" % self.ep_obj.prettyName())
             logger.log(u"Unable to find a download for " + self.ep_obj.prettyName())
-        else:
-            # snatch all items in queue
-            scheduler.Scheduler(SnatchQueue(), silent=True, runOnce=True, queue=ManualSnatchQueue).thread.start()

+        self.finish()
+
+    def finish(self):
+        # don't let this linger if something goes wrong
+        if self.success == None:
+            self.success = False
         generic_queue.QueueItem.finish(self)

-
-class RSSSearchQueueItem(generic_queue.QueueItem):
-    def __init__(self):
-        generic_queue.QueueItem.__init__(self, 'RSS Search', RSS_SEARCH)
-        self.thread_name = 'RSSFEED'
-        self.type = self.__class__.__name__
-
-    def execute(self):
-        generic_queue.QueueItem.execute(self)
-
-        results = False
-        didSearch = False
-
-        self._changeMissingEpisodes()
-
-        providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive()]
-
-        try:
-            with ThreadPoolExecutor(sickbeard.NUM_OF_THREADS) as executor:
-                for provider in providers:
-                    didSearch = True
-                    logger.log("Beginning RSS Feed search on " + provider.name)
-                    executor.submit(search.searchForNeededEpisodes, provider).add_done_callback(snatch_results)
-                executor.shutdown(wait=True)
-        except:
-            logger.log(traceback.format_exc(), logger.DEBUG)
-
-        if not didSearch:
-            logger.log(
-                u"No NZB/Torrent providers found or enabled in your SickRage config. Please check your settings.",
-                logger.ERROR)
-
-        if RSSSnatchQueue.empty():
-            logger.log(u"No needed episodes found on the RSS feeds")
-        else:
-            # snatch all items in queue
-            scheduler.Scheduler(SnatchQueue(), silent=True, runOnce=True, queue=RSSSnatchQueue).thread.start()
-
-        generic_queue.QueueItem.finish(self)
-
-    def _changeMissingEpisodes(self):
-
-        logger.log(u"Changing all old missing episodes to status WANTED")
-
-        curDate = datetime.date.today().toordinal()
-
-        myDB = db.DBConnection()
-        sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE status = ? AND airdate < ?",
-                                 [common.UNAIRED, curDate])
-
-        for sqlEp in sqlResults:
-
-            try:
-                show = helpers.findCertainShow(sickbeard.showList, int(sqlEp["showid"]))
-            except exceptions.MultipleShowObjectsException:
-                logger.log(u"ERROR: expected to find a single show matching " + str(sqlEp["showid"]))
-                return None
-
-            if show == None:
-                logger.log(u"Unable to find the show with ID " + str(
-                    sqlEp["showid"]) + " in your show list! DB value was " + str(sqlEp), logger.ERROR)
-                return None
-
-            ep = show.getEpisode(sqlEp["season"], sqlEp["episode"])
-            with ep.lock:
-                if ep.show.paused:
-                    ep.status = common.SKIPPED
-                else:
-                    ep.status = common.WANTED
-                ep.saveToDB()
-
-
 class BacklogQueueItem(generic_queue.QueueItem):
     def __init__(self, show, segment):
         generic_queue.QueueItem.__init__(self, 'Backlog', BACKLOG_SEARCH)
         self.priority = generic_queue.QueuePriorities.LOW
-        self.type = self.__class__.__name__
         self.thread_name = 'BACKLOG-' + str(show.indexerid)

         self.show = show
@@ -315,7 +179,7 @@ class BacklogQueueItem(generic_queue.QueueItem):
     def execute(self):
         generic_queue.QueueItem.execute(self)

-        results = False
+        fs = []
         didSearch = False

         # check if we want to search for season packs instead of just season/episode
@@ -327,15 +191,18 @@ class BacklogQueueItem(generic_queue.QueueItem):
         providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive()]

         try:
-            with ThreadPoolExecutor(sickbeard.NUM_OF_THREADS) as executor:
-                for provider in providers:
-                    didSearch = True
-                    logger.log("Beginning backlog search for [" + str(self.segment) + "] on " + provider.name)
-                    executor.submit(
-                        search.searchProviders, self, self.show, self.segment, self.wantedEpisodes, provider,
-                        seasonSearch, False).add_done_callback(snatch_results)
-                executor.shutdown(wait=True)
-        except Exception, e:
+            for provider in providers:
+                logger.log("Beginning backlog search for [" + str(self.segment) + "] on " + provider.name)
+                searchResult = search.searchProviders(self, self.show, self.segment, self.wantedEpisodes, provider,
+                                                      seasonSearch, False)
+
+                didSearch = True
+                if searchResult:
+                    self.success = SnatchQueue().process_results(searchResult)
+                    if self.success:
+                        break
+
+        except Exception:
             logger.log(traceback.format_exc(), logger.DEBUG)

         if not didSearch:
@@ -343,11 +210,8 @@ class BacklogQueueItem(generic_queue.QueueItem):
                 u"No NZB/Torrent providers found or enabled in your SickRage config. Please check your settings.",
                 logger.ERROR)

-        if BacklogSnatchQueue.empty():
+        if not self.success:
             logger.log(u"No needed episodes found during backlog search")
-        else:
-            # snatch all items in queue
-            scheduler.Scheduler(SnatchQueue(), silent=True, runOnce=True, queue=BacklogSnatchQueue).thread.start()

         self.finish()
@@ -356,8 +220,6 @@ class BacklogQueueItem(generic_queue.QueueItem):

         # check through the list of statuses to see if we want any
         for curStatusResult in statusResults:
-            time.sleep(1)
-
             curCompositeStatus = int(curStatusResult["status"])
             curStatus, curQuality = common.Quality.splitCompositeStatus(curCompositeStatus)
             episode = int(curStatusResult["episode"])
@@ -380,7 +242,6 @@ class FailedQueueItem(generic_queue.QueueItem):
     def __init__(self, show, episodes):
         generic_queue.QueueItem.__init__(self, 'Retry', FAILED_SEARCH)
         self.priority = generic_queue.QueuePriorities.HIGH
-        self.type = self.__class__.__name__
         self.thread_name = 'RETRY-' + str(show.indexerid)
         self.show = show
         self.episodes = episodes
@@ -389,13 +250,11 @@ class FailedQueueItem(generic_queue.QueueItem):
     def execute(self):
         generic_queue.QueueItem.execute(self)

-        results = False
+        fs = []
        didSearch = False

         episodes = []

         for i, epObj in enumerate(episodes):
-            time.sleep(1)
             logger.log(
                 "Beginning failed download search for " + epObj.prettyName())
@@ -412,14 +271,16 @@ class FailedQueueItem(generic_queue.QueueItem):
         providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive()]

         try:
-            with ThreadPoolExecutor(sickbeard.NUM_OF_THREADS) as executor:
-                for provider in providers:
-                    didSearch = True
-                    executor.submit(
-                        search.searchProviders, self, self.show, self.episodes[0].season, self.episodes, provider,
-                        False,
-                        True).add_done_callback(snatch_results)
-                executor.shutdown(wait=True)
+            for provider in providers:
+                searchResult = search.searchProviders(self.show, self.episodes[0].season, self.episodes, provider,
+                                                      False, True)
+
+                didSearch = True
+                if searchResult:
+                    self.success = SnatchQueue().process_results(searchResult)
+                    if self.success:
+                        break
+
         except Exception, e:
             logger.log(traceback.format_exc(), logger.DEBUG)
@@ -428,17 +289,7 @@ class FailedQueueItem(generic_queue.QueueItem):
                 u"No NZB/Torrent providers found or enabled in your SickRage config. Please check your settings.",
                 logger.ERROR)

-        if FailedSnatchQueue.empty():
+        if not self.success:
             logger.log(u"No needed episodes found on the RSS feeds")
-        else:
-            # snatch all items in queue
-            scheduler.Scheduler(SnatchQueue(), silent=True, runOnce=True, queue=FailedSnatchQueue).thread.start()

         self.finish()
-
-
-# send to snatch queue
-def snatch_results(f):
-    for result in f.result():
-        snatch_queue_item = SnatchQueueItem(result, result.queue_item)
-        SnatchQueue().add_item(snatch_queue_item)
@@ -19,6 +19,7 @@
 from __future__ import with_statement

 import traceback
+import threading
 import Queue

 import sickbeard
@@ -31,18 +32,18 @@ from sickbeard import generic_queue
 from sickbeard import name_cache
 from sickbeard.exceptions import ex

-ShowItemQueue = Queue.PriorityQueue()
+show_queue_lock = threading.Lock()
+show_queue = Queue.Queue()

 class ShowQueue(generic_queue.GenericQueue):
     def __init__(self):

         generic_queue.GenericQueue.__init__(self)
         self.queue_name = "SHOWQUEUE"
-        self.queue = ShowItemQueue
+        self.queue = show_queue


     def _isInQueue(self, show, actions):
-        return show in [x.show for x in self.queue.queue if x.action_id in actions] if self.queue.qsize() > 0 else []
+        return show in [x.show for x in self.queue.get() if x.action_id in actions] if not self.queue.empty() else []

     def _isBeingSomethinged(self, show, actions):
         return self.currentItem != None and show == self.currentItem.show and \
@@ -76,7 +77,8 @@ class ShowQueue(generic_queue.GenericQueue):
         return self._isBeingSomethinged(show, (ShowQueueActions.SUBTITLE,))

     def _getLoadingShowList(self):
-        return [x for x in self.queue.queue + [self.currentItem] if x != None and x.isLoading] if self.queue.qsize() > 0 else []
+        return [x for x in self.queue.get() if x != None and x.isLoading] + [self.currentItem] if not self.queue.empty() else []


     loadingShowList = property(_getLoadingShowList)
@@ -180,8 +182,7 @@ class ShowQueueItem(generic_queue.QueueItem):
         self.show = show

     def isInQueue(self):
-        return self in sickbeard.showQueueScheduler.action.queue + [
-            sickbeard.showQueueScheduler.action.currentItem]  #@UndefinedVariable
+        return self in sickbeard.showQueueScheduler.action.queue.queue + [sickbeard.showQueueScheduler.action.currentItem]

     def _getName(self):
         return str(self.show.indexerid)
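One caution on the rewritten membership checks above: Queue.get() removes (and may block for) an item, so _isInQueue and _getLoadingShowList as written consume the queue they are inspecting. Peeking at the backing deque under the new lock keeps the contents intact; a hedged sketch, assuming show_queue_lock is meant to guard exactly this:

import Queue
import threading

show_queue = Queue.Queue()
show_queue_lock = threading.Lock()

def is_in_queue(show, actions):
    # inspect the underlying deque without consuming entries; queue.get()
    # would pop one item (and block when the queue is empty) instead
    with show_queue_lock:
        return show in [x.show for x in show_queue.queue if x.action_id in actions]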
sickbeard/snatch_queue.py (new file, 83 lines)
@@ -0,0 +1,83 @@
import Queue
import threading

import sickbeard
from sickbeard import logger, search, generic_queue, ui
from sickbeard.common import Quality

snatch_queue_lock = threading.Lock()


class SnatchQueue(generic_queue.GenericQueue):
    def __init__(self):
        generic_queue.GenericQueue.__init__(self)
        self.queue_name = "SNATCHQUEUE"

        # snatch queues, one per search type
        self.ManualQueue = Queue.Queue()
        self.BacklogQueue = Queue.Queue()
        self.FailedQueue = Queue.Queue()

    def is_in_queue(self, queue, show, episodes, quality):
        for i, cur_item in enumerate(queue.queue):
            # sorted() gives an order-insensitive comparison; list.sort()
            # returns None and must not be used here
            if cur_item.results.show == show and sorted(cur_item.results.episodes) == sorted(episodes):
                if cur_item.results.quality < quality:
                    # a better-quality result replaces the queued one
                    queue.queue.pop(i)
                    return False
                return True
        return False

    def add_item(self, item):
        resultsKeep = []
        for result in item.results:
            show = result.extraInfo[0]
            episodes = result.episodes
            quality = result.quality

            # check if we already have an item ready to snatch with the same or better quality score
            # (self.queue is selected per item type in process_results below)
            if not self.is_in_queue(self.queue, show, episodes, quality):
                generic_queue.GenericQueue.add_item(self, item)
                resultsKeep.append(result)
                logger.log(
                    u"Adding item [" + result.name + "] to snatch queue",
                    logger.DEBUG)
            else:
                logger.log(
                    u"Not adding item [" + result.name + "] it's already in the queue with same or higher quality",
                    logger.DEBUG)

        # update item with the new results we want to snatch and discard the rest
        item.results = resultsKeep

    def snatch_item(self, item):
        for result in item.results:
            # just use the first result for now
            logger.log(u"Downloading " + result.name + " from " + result.provider.name)
            status = search.snatchEpisode(result)
            item.success = status
            generic_queue.QueueItem.finish(item)
            return status

    def process_results(self, item):
        # dynamically select our snatch queue
        if isinstance(item, sickbeard.search_queue.ManualSearchQueueItem):
            self.queue = self.ManualQueue
        elif isinstance(item, sickbeard.search_queue.BacklogQueueItem):
            self.queue = self.BacklogQueue
        elif isinstance(item, sickbeard.search_queue.FailedQueueItem):
            self.queue = self.FailedQueue

        for result in item.results:
            logger.log(u"Checking if we should snatch " + result.name, logger.DEBUG)
            show_obj = result.episodes[0].show
            any_qualities, best_qualities = Quality.splitQuality(show_obj.quality)

            # if there is a redownload that's higher than this then we definitely need to keep looking
            if best_qualities and result.quality == max(best_qualities):
                return self.snatch_item(item)

            # if there's no redownload that's higher (above) and this is the highest initial download then we're good
            elif any_qualities and result.quality in any_qualities:
                return self.snatch_item(item)

        # Add item to the queue if we couldn't find a match to snatch
        self.add_item(item)
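process_results above gates snatching on Quality.splitQuality, which splits a show's composite quality setting into the allowed initial qualities and the preferred re-download ("best") qualities. A minimal sketch of that decision, with made-up constants standing in for sickbeard.common.Quality values:

    # illustrative stand-ins; the real constants live in sickbeard.common.Quality
    HDTV, FULLHDTV, FULLHDBLURAY = 1, 2, 4

    def should_snatch(result_quality, any_qualities, best_qualities):
        # a result at the top preferred ("best") quality is always snatched
        if best_qualities and result_quality == max(best_qualities):
            return True
        # failing that, any acceptable initial quality is good enough
        return bool(any_qualities) and result_quality in any_qualities

    print(should_snatch(FULLHDBLURAY, [HDTV], [FULLHDTV, FULLHDBLURAY]))  # True
    print(should_snatch(HDTV, [HDTV], []))                                # True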
sickbeard/tvcache.py
@@ -15,6 +15,9 @@
 #
 # You should have received a copy of the GNU General Public License
 # along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.

+from __future__ import with_statement
+
 import os
 import time
@@ -23,23 +26,24 @@ import sqlite3
 import urllib
 import urlparse
 import re
+import threading
 import sickbeard

-from shove import Shove
-from feedcache import cache
+from lib.shove import Shove
+from lib.feedcache import cache

 from sickbeard import db
 from sickbeard import logger
 from sickbeard.common import Quality

 from sickbeard import helpers, show_name_helpers
-from sickbeard.exceptions import MultipleShowObjectsException, ex
+from sickbeard.exceptions import MultipleShowObjectsException
 from sickbeard.exceptions import AuthException
 from sickbeard import encodingKludge as ek

 from name_parser.parser import NameParser, InvalidNameException

+cache_lock = threading.Lock()
+
 class CacheDBConnection(db.DBConnection):
     def __init__(self, providerName):
@@ -93,6 +97,40 @@ class TVCache():
     def _checkItemAuth(self, title, url):
         return True

+    def updateCache(self):
+        if not self.shouldUpdate():
+            return
+
+        if self._checkAuth(None):
+            data = self._getRSSData()
+
+            # as long as the http request worked we count this as an update
+            if data:
+                self.setLastUpdate()
+            else:
+                return []
+
+            # now that we've loaded the current RSS feed lets delete the old cache
+            logger.log(u"Clearing " + self.provider.name + " cache and updating with new information")
+            self._clearCache()
+
+            if self._checkAuth(data):
+                items = data.entries
+
+                cl = []
+                for item in items:
+                    ci = self._parseItem(item)
+                    if ci is not None:
+                        cl.append(ci)
+
+                myDB = self._getDB()
+                myDB.mass_action(cl)
+
+            else:
+                raise AuthException(
+                    u"Your authentication credentials for " + self.provider.name + " are incorrect, check your config")
+
+        return []
+
     def getRSSFeed(self, url, post_data=None):
         # create provider storage cache
         storage = Shove('sqlite:///' + ek.ek(os.path.join, sickbeard.CACHE_DIR, self.provider.name) + '.db')
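The relocated updateCache above batches one INSERT per parsed RSS item into cl and submits the lot through myDB.mass_action, so the provider cache is rewritten in one pass rather than item by item. A rough sqlite3 sketch of that batching pattern (the table and columns here are illustrative, not the real cache schema):

    import sqlite3

    def mass_action(conn, actions):
        # run a batch of (sql, args) pairs in a single transaction, the same
        # shape as the cl list handed to myDB.mass_action above
        with conn:  # commits on success, rolls back on error
            for sql, args in actions:
                conn.execute(sql, args)

    conn = sqlite3.connect(':memory:')
    conn.execute('CREATE TABLE cache (name TEXT, url TEXT)')  # illustrative schema
    mass_action(conn, [
        ('INSERT INTO cache VALUES (?,?)', (u'Show.S01E01.720p', u'http://example.invalid/a')),
        ('INSERT INTO cache VALUES (?,?)', (u'Show.S01E02.720p', u'http://example.invalid/b')),
    ])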
@@ -121,50 +159,16 @@ class TVCache():

         return f

-    def updateCache(self):
-
-        if not self.shouldUpdate():
-            return
-
-        if self._checkAuth(None):
-            data = self._getRSSData()
-
-            # as long as the http request worked we count this as an update
-            if data:
-                self.setLastUpdate()
-            else:
-                return []
-
-            # now that we've loaded the current RSS feed lets delete the old cache
-            logger.log(u"Clearing " + self.provider.name + " cache and updating with new information")
-            self._clearCache()
-
-            if self._checkAuth(data):
-                items = data.entries
-                ql = []
-                for item in items:
-                    qi = self._parseItem(item)
-                    if qi is not None:
-                        ql.append(qi)
-
-                if len(ql):
-                    myDB = self._getDB()
-                    myDB.mass_action(ql)
-
-            else:
-                raise AuthException(
-                    u"Your authentication credentials for " + self.provider.name + " are incorrect, check your config")
-
-        return []
-
     def _translateTitle(self, title):
         return title.replace(' ', '.')

     def _translateLinkURL(self, url):
         return url.replace('&amp;', '&')

     def _parseItem(self, item):
         title = item.title
         url = item.link
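The _translateTitle/_translateLinkURL context lines above are the per-item normalisation hooks: scene release names use dots rather than spaces, and feed links often arrive with HTML-escaped ampersands. Standalone equivalents for illustration:

    def translate_title(title):
        # mirrors _translateTitle: scene release names use dots, not spaces
        return title.replace(' ', '.')

    def translate_link_url(url):
        # mirrors _translateLinkURL: undo HTML-escaped ampersands from the feed
        return url.replace('&amp;', '&')

    print(translate_title('Some Show 1x01'))                             # Some.Show.1x01
    print(translate_link_url('http://example.invalid/get?a=1&amp;b=2'))  # ...?a=1&b=2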
@@ -197,8 +201,8 @@ class TVCache():

         return datetime.datetime.fromtimestamp(lastTime)

     def setLastUpdate(self, toDate=None):
         if not toDate:
             toDate = datetime.datetime.today()

@@ -207,8 +211,10 @@ class TVCache():
                     {'time': int(time.mktime(toDate.timetuple()))},
                     {'provider': self.providerID})

     lastUpdate = property(_getLastUpdate)

     def shouldUpdate(self):
         # if we've updated recently then skip the update
         if datetime.datetime.today() - self.lastUpdate < datetime.timedelta(minutes=self.minTime):
@@ -218,13 +224,10 @@ class TVCache():

         return True

     def _addCacheEntry(self, name, url, quality=None):
-
-        cacheResult = sickbeard.name_cache.retrieveNameFromCache(name)
-        if cacheResult:
-            logger.log(u"Found Indexer ID:[" + repr(cacheResult) + "], using that for [" + str(name) + "]",
-                       logger.DEBUG)
-            return None
+        indexerid = None
+        in_cache = False

         # if we don't have complete info then parse the filename to get it
         try:

@@ -242,9 +245,34 @@ class TVCache():
             logger.log(u"No series name retrieved from " + name + ", unable to cache it", logger.DEBUG)
             return None

-        showObj = sickbeard.name_cache.retrieveShowFromCache(parse_result.series_name)
+        cacheResult = sickbeard.name_cache.retrieveNameFromCache(name)
+        if cacheResult:
+            in_cache = True
+            indexerid = int(cacheResult)
+
+        if not indexerid:
+            name_list = show_name_helpers.sceneToNormalShowNames(parse_result.series_name)
+            for cur_name in name_list:
+                if not indexerid:
+                    for curShow in sickbeard.showList:
+                        if show_name_helpers.isGoodResult(cur_name, curShow, False):
+                            indexerid = int(curShow.indexerid)
+                            break
+
+                if not indexerid:
+                    # do a scene reverse-lookup to get a list of all possible names
+                    scene_id = sickbeard.scene_exceptions.get_scene_exception_by_name(cur_name)
+                    if scene_id:
+                        indexerid = int(scene_id)
+                        break
+
+        showObj = None
+        if indexerid:
+            logger.log(u"Found Indexer ID: [" + str(indexerid) + "], for [" + str(cur_name) + "]", logger.DEBUG)
+            showObj = helpers.findCertainShow(sickbeard.showList, indexerid)

         if not showObj:
-            logger.log(u"Show is not in our list of watched shows [" + parse_result.series_name + "], not caching ...", logger.DEBUG)
+            logger.log(u"No match for show: [" + parse_result.series_name + "], not caching ...", logger.DEBUG)
             return None

         season = episodes = None
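The rewritten _addCacheEntry above resolves an indexer id in three stages: the release-name cache, a scan of the configured show list, and finally a scene-exception reverse lookup. A condensed sketch of that precedence (the helpers are parameters here; the real code calls sickbeard.name_cache, show_name_helpers and sickbeard.scene_exceptions directly):

    def resolve_indexerid(name, series_name, name_cache, show_list,
                          normalize, is_good, scene_lookup):
        """Return (indexerid, in_cache) following _addCacheEntry's precedence."""
        cached = name_cache.get(name)          # 1) release-name cache
        if cached:
            return int(cached), True
        for cur_name in normalize(series_name):
            for show in show_list:             # 2) configured show list
                if is_good(cur_name, show):
                    return int(show.indexerid), False
            scene_id = scene_lookup(cur_name)  # 3) scene-exception reverse lookup
            if scene_id:
                return int(scene_id), False
        return None, False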
@@ -254,7 +282,7 @@ class TVCache():
             airdate = parse_result.air_date.toordinal() or parse_result.sports_event_date.toordinal()
             sql_results = myDB.select(
                 "SELECT season, episode FROM tv_episodes WHERE showid = ? AND indexer = ? AND airdate = ?",
-                [showObj.indexerid, showObj.indexer, airdate])
+                [indexerid, showObj.indexer, airdate])
             if sql_results > 0:
                 season = int(sql_results[0]["season"])
                 episodes = [int(sql_results[0]["episode"])]
@@ -277,18 +305,21 @@ class TVCache():
             name = unicode(name, 'utf-8')

         logger.log(u"Added RSS item: [" + name + "] to cache: [" + self.providerID + "]", logger.DEBUG)
-        sickbeard.name_cache.addNameToCache(name, showObj.indexerid)
+
+        if not in_cache:
+            sickbeard.name_cache.addNameToCache(name, indexerid)

         return [
             "INSERT INTO [" + self.providerID + "] (name, season, episodes, indexerid, url, time, quality) VALUES (?,?,?,?,?,?,?)",
-            [name, season, episodeText, showObj.indexerid, url, curTimestamp, quality]]
+            [name, season, episodeText, indexerid, url, curTimestamp, quality]]

     def searchCache(self, episode, manualSearch=False):
         neededEps = self.findNeededEpisodes(episode, manualSearch)
         return neededEps

     def listPropers(self, date=None, delimiter="."):
         myDB = self._getDB()

         sql = "SELECT * FROM [" + self.providerID + "] WHERE name LIKE '%.PROPER.%' OR name LIKE '%.REPACK.%'"
@@ -298,6 +329,7 @@ class TVCache():

         return filter(lambda x: x['indexerid'] != 0, myDB.select(sql))

     def findNeededEpisodes(self, epObj=None, manualSearch=False):
         neededEps = {}

@@ -319,7 +351,7 @@ class TVCache():
             # get the show object, or if it's not one of our shows then ignore it
             try:
                 showObj = helpers.findCertainShow(sickbeard.showList, int(curResult["indexerid"]))
-            except (MultipleShowObjectsException):
+            except MultipleShowObjectsException:
                 showObj = None

             if not showObj:

@@ -365,3 +397,4 @@ class TVCache():
                 neededEps[epObj].append(result)

         return neededEps
sickbeard/webapi.py
@@ -1368,14 +1368,12 @@ class CMD_SickBeardCheckScheduler(ApiCall):

         backlogPaused = sickbeard.searchQueueScheduler.action.is_backlog_paused()  #@UndefinedVariable
         backlogRunning = sickbeard.searchQueueScheduler.action.is_backlog_in_progress()  #@UndefinedVariable
-        searchStatus = sickbeard.currentSearchScheduler.action.amActive  #@UndefinedVariable
-        nextSearch = str(sickbeard.currentSearchScheduler.timeLeft()).split('.')[0]
         nextBacklog = sickbeard.backlogSearchScheduler.nextRun().strftime(dateFormat).decode(sickbeard.SYS_ENCODING)

         myDB.connection.close()
         data = {"backlog_is_paused": int(backlogPaused), "backlog_is_running": int(backlogRunning),
                 "last_backlog": _ordinal_to_dateForm(sqlResults[0]["last_backlog"]),
-                "search_is_running": int(searchStatus), "next_search": nextSearch, "next_backlog": nextBacklog}
+                "next_backlog": nextBacklog}
         return _responds(RESULT_SUCCESS, data)
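With the per-provider search scheduler gone, sb.checkscheduler now reports only backlog state. An illustrative response shape (the values are made up, and the result/message/data envelope is assumed from the _responds() wrapper above):

    # hypothetical sb.checkscheduler payload after this change
    response = {
        "result": "success",
        "message": "",
        "data": {
            "backlog_is_paused": 0,
            "backlog_is_running": 0,
            "last_backlog": "2014-01-01",
            "next_backlog": "2014-01-02",
        },
    }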
@@ -1424,27 +1422,6 @@ class CMD_SickBeardDeleteRootDir(ApiCall):
         return _responds(RESULT_SUCCESS, _getRootDirs(), msg="Root directory deleted")


-class CMD_SickBeardForceSearch(ApiCall):
-    _help = {"desc": "force the episode search early"
-             }
-
-    def __init__(self, args, kwargs):
-        # required
-        # optional
-        # super, missing, help
-        ApiCall.__init__(self, args, kwargs)
-
-    def run(self):
-        """ force the episode search early """
-        # Changing all old missing episodes to status WANTED
-        # Beginning search for new episodes on RSS
-        # Searching all providers for any needed episodes
-        result = sickbeard.currentSearchScheduler.forceRun()
-        if result:
-            return _responds(RESULT_SUCCESS, msg="Episode search forced")
-        return _responds(RESULT_FAILURE, msg="Can not search for episode")
-
-
 class CMD_SickBeardGetDefaults(ApiCall):
     _help = {"desc": "get sickbeard user defaults"}

@@ -2604,7 +2581,6 @@ _functionMaper = {"help": CMD_Help,
                   "sb.addrootdir": CMD_SickBeardAddRootDir,
                   "sb.checkscheduler": CMD_SickBeardCheckScheduler,
                   "sb.deleterootdir": CMD_SickBeardDeleteRootDir,
-                  "sb.forcesearch": CMD_SickBeardForceSearch,
                   "sb.getdefaults": CMD_SickBeardGetDefaults,
                   "sb.getmessages": CMD_SickBeardGetMessages,
                   "sb.getrootdirs": CMD_SickBeardGetRootDirs,
sickbeard/webserve.py
@@ -204,24 +204,11 @@ class ManageSearches:
         #t.backlogPI = sickbeard.backlogSearchScheduler.action.getProgressIndicator()
         t.backlogPaused = sickbeard.searchQueueScheduler.action.is_backlog_paused()  # @UndefinedVariable
         t.backlogRunning = sickbeard.searchQueueScheduler.action.is_backlog_in_progress()  # @UndefinedVariable
-        t.searchStatus = sickbeard.currentSearchScheduler.action.amActive  # @UndefinedVariable

         t.submenu = ManageMenu()

         return _munge(t)

-    @cherrypy.expose
-    def forceSearch(self):
-
-        # force it to run the next time it looks
-        result = sickbeard.currentSearchScheduler.forceRun()
-        if result:
-            logger.log(u"Search forced")
-            ui.notifications.message('Episode search started',
-                                     'Note: RSS feeds may not be updated if retrieved recently')
-
-        redirect("/manage/manageSearches/")
-
     @cherrypy.expose
     def pauseBacklog(self, paused=None):
         if paused == "1":