Fixed issues with multi-threading.

Fixed issues with snatching air-by-date shows.
Fixed issues with naming patterns.
Fixed issues with invalid literals.
Fixed issues with country codes missing and causing web errors.

Fixed so many issues I can't write them all down ... :)
This commit is contained in:
echel0n 2014-05-08 07:03:50 -07:00
parent 342be6cbae
commit 2a4f8780e2
33 changed files with 278 additions and 288 deletions

View file

@ -53,9 +53,11 @@
<a href="$sickbeard.indexerApi($show.indexer).config["show_url"]$show.indexerid" onclick="window.open(this.href, '_blank'); return false;" title="$sickbeard.indexerApi($show.indexer).config["show_url"]$show.indexerid"><img alt="$sickbeard.indexerApi($show.indexer).name" height="16" width="16" src="$sbRoot/images/$sickbeard.indexerApi($show.indexer).config["icon"] "style="margin-top: -1px;"/></a> <a href="$sickbeard.indexerApi($show.indexer).config["show_url"]$show.indexerid" onclick="window.open(this.href, '_blank'); return false;" title="$sickbeard.indexerApi($show.indexer).config["show_url"]$show.indexerid"><img alt="$sickbeard.indexerApi($show.indexer).name" height="16" width="16" src="$sbRoot/images/$sickbeard.indexerApi($show.indexer).config["icon"] "style="margin-top: -1px;"/></a>
</span> </span>
#else #else
#for $country in $show.imdb_info['country_codes'].split('|') #if 'country_codes' in $show.imdb_info:
<img src="$sbRoot/images/flags/${$country}.png" width="16" height="11" style="margin-top: 3px; margin-left: 3px" /> #for $country in $show.imdb_info['country_codes'].split('|')
#end for <img src="$sbRoot/images/flags/${$country}.png" width="16" height="11" style="margin-top: 3px; margin-left: 3px" />
#end for
#end if
($show.imdb_info['year']) - $show.imdb_info['runtimes'] min - $show.imdb_info['genres'].replace('|',' | ') ($show.imdb_info['year']) - $show.imdb_info['runtimes'] min - $show.imdb_info['genres'].replace('|',' | ')
<span class="tvshowLink" style="vertical-align: text-top"> <span class="tvshowLink" style="vertical-align: text-top">
<a href="http://www.imdb.com/title/$show.imdbid" rel="noreferrer" onclick="window.open('${sickbeard.ANON_REDIRECT}' + this.href, '_blank'); return false;" title="http://www.imdb.com/title/$show.imdbid"><img alt="[imdb]" height="16" width="16" src="$sbRoot/images/imdb.png" style="margin-top: -1px;"/> <a href="http://www.imdb.com/title/$show.imdbid" rel="noreferrer" onclick="window.open('${sickbeard.ANON_REDIRECT}' + this.href, '_blank'); return false;" title="http://www.imdb.com/title/$show.imdbid"><img alt="[imdb]" height="16" width="16" src="$sbRoot/images/imdb.png" style="margin-top: -1px;"/>

View file

@ -79,6 +79,7 @@ showUpdateScheduler = None
versionCheckScheduler = None versionCheckScheduler = None
showQueueScheduler = None showQueueScheduler = None
searchQueueScheduler = None searchQueueScheduler = None
snatchQueueScheduler = None
properFinderScheduler = None properFinderScheduler = None
autoPostProcesserScheduler = None autoPostProcesserScheduler = None
subtitlesFinderScheduler = None subtitlesFinderScheduler = None
@ -489,7 +490,7 @@ def initialize(consoleLogging=True):
USE_PUSHBULLET, PUSHBULLET_NOTIFY_ONSNATCH, PUSHBULLET_NOTIFY_ONDOWNLOAD, PUSHBULLET_NOTIFY_ONSUBTITLEDOWNLOAD, PUSHBULLET_API, PUSHBULLET_DEVICE, \ USE_PUSHBULLET, PUSHBULLET_NOTIFY_ONSNATCH, PUSHBULLET_NOTIFY_ONDOWNLOAD, PUSHBULLET_NOTIFY_ONSUBTITLEDOWNLOAD, PUSHBULLET_API, PUSHBULLET_DEVICE, \
versionCheckScheduler, VERSION_NOTIFY, AUTO_UPDATE, PROCESS_AUTOMATICALLY, UNPACK, \ versionCheckScheduler, VERSION_NOTIFY, AUTO_UPDATE, PROCESS_AUTOMATICALLY, UNPACK, \
KEEP_PROCESSED_DIR, PROCESS_METHOD, TV_DOWNLOAD_DIR, MIN_SEARCH_FREQUENCY, DEFAULT_UPDATE_FREQUENCY,MIN_UPDATE_FREQUENCY,UPDATE_FREQUENCY,\ KEEP_PROCESSED_DIR, PROCESS_METHOD, TV_DOWNLOAD_DIR, MIN_SEARCH_FREQUENCY, DEFAULT_UPDATE_FREQUENCY,MIN_UPDATE_FREQUENCY,UPDATE_FREQUENCY,\
showQueueScheduler, searchQueueScheduler, ROOT_DIRS, CACHE_DIR, ACTUAL_CACHE_DIR, \ showQueueScheduler, searchQueueScheduler, snatchQueueScheduler, ROOT_DIRS, CACHE_DIR, ACTUAL_CACHE_DIR, \
NAMING_PATTERN, NAMING_MULTI_EP, NAMING_FORCE_FOLDERS, NAMING_ABD_PATTERN, NAMING_CUSTOM_ABD, NAMING_SPORTS_PATTERN, NAMING_CUSTOM_SPORTS, NAMING_STRIP_YEAR, \ NAMING_PATTERN, NAMING_MULTI_EP, NAMING_FORCE_FOLDERS, NAMING_ABD_PATTERN, NAMING_CUSTOM_ABD, NAMING_SPORTS_PATTERN, NAMING_CUSTOM_SPORTS, NAMING_STRIP_YEAR, \
RENAME_EPISODES, properFinderScheduler, PROVIDER_ORDER, autoPostProcesserScheduler, \ RENAME_EPISODES, properFinderScheduler, PROVIDER_ORDER, autoPostProcesserScheduler, \
WOMBLE, OMGWTFNZBS, OMGWTFNZBS_USERNAME, OMGWTFNZBS_APIKEY, providerList, newznabProviderList, torrentRssProviderList, \ WOMBLE, OMGWTFNZBS, OMGWTFNZBS_USERNAME, OMGWTFNZBS_APIKEY, providerList, newznabProviderList, torrentRssProviderList, \
@ -1124,7 +1125,7 @@ def start():
def halt(): def halt():
global __INITIALIZED__, currentSearchScheduler, backlogSearchScheduler, showUpdateScheduler, \ global __INITIALIZED__, currentSearchScheduler, backlogSearchScheduler, showUpdateScheduler, \
showQueueScheduler, properFinderScheduler, autoPostProcesserScheduler, searchQueueScheduler, \ showQueueScheduler, properFinderScheduler, autoPostProcesserScheduler, searchQueueScheduler, \
subtitlesFinderScheduler, started, \ subtitlesFinderScheduler, snatchQueueScheduler, started, \
traktWatchListCheckerSchedular traktWatchListCheckerSchedular
with INIT_LOCK: with INIT_LOCK:
@ -1177,6 +1178,13 @@ def halt():
except: except:
pass pass
snatchQueueScheduler.abort = True
logger.log(u"Waiting for the SNATCHQUEUE thread to exit")
try:
snatchQueueScheduler.thread.join(10)
except:
pass
autoPostProcesserScheduler.abort = True autoPostProcesserScheduler.abort = True
logger.log(u"Waiting for the POSTPROCESSER thread to exit") logger.log(u"Waiting for the POSTPROCESSER thread to exit")
try: try:

View file

@ -56,7 +56,6 @@ def foldersAtPath(path, includeParent=False):
# walk up the tree until we find a valid path # walk up the tree until we find a valid path
while path and not os.path.isdir(path): while path and not os.path.isdir(path):
time.sleep(0.01)
if path == os.path.dirname(path): if path == os.path.dirname(path):
path = '' path = ''
break break

View file

@ -399,7 +399,6 @@ class ConfigMigrator():
sickbeard.CONFIG_VERSION = self.config_version sickbeard.CONFIG_VERSION = self.config_version
while self.config_version < self.expected_config_version: while self.config_version < self.expected_config_version:
time.sleep(0.01)
next_version = self.config_version + 1 next_version = self.config_version + 1
if next_version in self.migration_names: if next_version in self.migration_names:

View file

@ -79,7 +79,6 @@ class DBConnection:
attempt = 0 attempt = 0
while attempt < 5: while attempt < 5:
time.sleep(0.01)
try: try:
if args == None: if args == None:
logger.log(self.filename + ": " + query, logger.DB) logger.log(self.filename + ": " + query, logger.DB)
@ -119,7 +118,6 @@ class DBConnection:
attempt = 0 attempt = 0
while attempt < 5: while attempt < 5:
time.sleep(0.01)
try: try:
for qu in querylist: for qu in querylist:
if len(qu) == 1: if len(qu) == 1:
@ -164,7 +162,6 @@ class DBConnection:
attempt = 0 attempt = 0
while attempt < 5: while attempt < 5:
time.sleep(0.01)
try: try:
if args == None: if args == None:
logger.log(self.filename + ": " + query, logger.DB) logger.log(self.filename + ": " + query, logger.DB)

View file

@ -18,30 +18,22 @@
import datetime import datetime
import threading import threading
import Queue
from sickbeard import logger from sickbeard import logger
class QueuePriorities: class QueuePriorities:
LOW = 10 LOW = 10
NORMAL = 20 NORMAL = 20
HIGH = 30 HIGH = 30
class GenericQueue:
class GenericQueue(object):
def __init__(self): def __init__(self):
self.currentItem = None self.currentItem = None
self.queue = []
self.thread = None self.thread = None
self.queue_name = "QUEUE" self.queue_name = "QUEUE"
self.min_priority = 0 self.min_priority = 0
self.queue = Queue.PriorityQueue()
self.currentItem = None
def pause(self): def pause(self):
logger.log(u"Pausing queue") logger.log(u"Pausing queue")
@ -53,11 +45,13 @@ class GenericQueue(object):
def add_item(self, item): def add_item(self, item):
item.added = datetime.datetime.now() item.added = datetime.datetime.now()
self.queue.append(item) self.queue.put(item, item.priority)
return item return item
def run(self): def run(self, queue=None):
# dynamically set queue
if queue:
self.queue = queue
# only start a new task if one isn't already going # only start a new task if one isn't already going
if self.thread == None or self.thread.isAlive() == False: if self.thread == None or self.thread.isAlive() == False:
@ -67,55 +61,25 @@ class GenericQueue(object):
self.currentItem.finish() self.currentItem.finish()
self.currentItem = None self.currentItem = None
# if there's something in the queue then run it in a thread and take it out of the queue if not self.queue.empty():
if len(self.queue) > 0: queueItem = self.queue.get()
# sort by priority
def sorter(x, y):
"""
Sorts by priority descending then time ascending
"""
if x.priority == y.priority:
if y.added == x.added:
return 0
elif y.added < x.added:
return 1
elif y.added > x.added:
return -1
else:
return y.priority - x.priority
self.queue.sort(cmp=sorter)
queueItem = self.queue[0]
if queueItem.priority < self.min_priority: if queueItem.priority < self.min_priority:
return return
# launch the queue item in a thread
# TODO: improve thread name
threadName = self.queue_name + '-' + queueItem.get_thread_name() threadName = self.queue_name + '-' + queueItem.get_thread_name()
self.thread = threading.Thread(None, queueItem.execute, threadName) self.thread = threading.Thread(None, queueItem.execute, threadName)
self.thread.start() self.thread.start()
self.currentItem = queueItem self.currentItem = queueItem
# take it out of the queue
del self.queue[0]
class QueueItem: class QueueItem:
def __init__(self, name, action_id=0): def __init__(self, name, action_id=0):
self.name = name self.name = name
self.inProgress = False self.inProgress = False
self.priority = QueuePriorities.NORMAL self.priority = QueuePriorities.NORMAL
self.thread_name = None self.thread_name = None
self.action_id = action_id self.action_id = action_id
self.added = None self.added = None
def get_thread_name(self): def get_thread_name(self):
@ -132,6 +96,4 @@ class QueueItem:
def finish(self): def finish(self):
"""Implementing Classes should call this""" """Implementing Classes should call this"""
self.inProgress = False self.inProgress = False

View file

@ -551,8 +551,6 @@ def delete_empty_folders(check_empty_dir, keep_dir=None):
# as long as the folder exists and doesn't contain any files, delete it # as long as the folder exists and doesn't contain any files, delete it
while ek.ek(os.path.isdir, check_empty_dir) and check_empty_dir != keep_dir: while ek.ek(os.path.isdir, check_empty_dir) and check_empty_dir != keep_dir:
time.sleep(0.01)
check_files = ek.ek(os.listdir, check_empty_dir) check_files = ek.ek(os.listdir, check_empty_dir)
if not check_files or (len(check_files) <= len(ignore_items) and all( if not check_files or (len(check_files) <= len(ignore_items) and all(
@ -794,7 +792,6 @@ def backupVersionedFile(old_file, version):
new_file = old_file + '.' + 'v' + str(version) new_file = old_file + '.' + 'v' + str(version)
while not ek.ek(os.path.isfile, new_file): while not ek.ek(os.path.isfile, new_file):
time.sleep(0.01)
if not ek.ek(os.path.isfile, old_file): if not ek.ek(os.path.isfile, old_file):
logger.log(u"Not creating backup, " + old_file + " doesn't exist", logger.DEBUG) logger.log(u"Not creating backup, " + old_file + " doesn't exist", logger.DEBUG)
break break

View file

@ -18,6 +18,7 @@
from __future__ import with_statement from __future__ import with_statement
import time
import os import os
import sys import sys
import threading import threading

View file

@ -212,8 +212,8 @@ class NameParser(object):
i = result = 0 i = result = 0
for integer, numeral in numeral_map: for integer, numeral in numeral_map:
time.sleep(0.01)
while n[i:i + len(numeral)] == numeral: while n[i:i + len(numeral)] == numeral:
time.sleep(1)
result += integer result += integer
i += len(numeral) i += len(numeral)

View file

@ -170,7 +170,6 @@ def update_network_dict():
# list of sql commands to update the network_timezones table # list of sql commands to update the network_timezones table
ql = [] ql = []
for cur_d, cur_t in d.iteritems(): for cur_d, cur_t in d.iteritems():
time.sleep(0.01)
h_k = old_d.has_key(cur_d) h_k = old_d.has_key(cur_d)
if h_k and cur_t != old_d[cur_d]: if h_k and cur_t != old_d[cur_d]:
# update old record # update old record

View file

@ -85,10 +85,9 @@ class ProperFinder():
# if they haven't been added by a different provider than add the proper to the list # if they haven't been added by a different provider than add the proper to the list
for x in curPropers: for x in curPropers:
time.sleep(0.01)
showObj = helpers.findCertainShow(sickbeard.showList, x.indexerid) showObj = helpers.findCertainShow(sickbeard.showList, x.indexerid)
if not showObj: if not showObj:
logger.log(u"Unable to find the show we watch with indexerID " + str(x.indexerid), logger.ERROR) logger.log(u"Unable to find the show in our watch list " + str(x.name), logger.DEBUG)
continue continue
name = self._genericName(x.name) name = self._genericName(x.name)

View file

@ -321,7 +321,7 @@ class BTNCache(tvcache.TVCache):
# By now we know we've got data and no auth errors, all we need to do is put it in the database # By now we know we've got data and no auth errors, all we need to do is put it in the database
cl = [] cl = []
for item in data: for item in data:
time.sleep(0.01)
ci = self._parseItem(item) ci = self._parseItem(item)
if ci is not None: if ci is not None:
cl.append(ci) cl.append(ci)

View file

@ -240,18 +240,15 @@ class GenericProvider:
self.cache.updateCache() self.cache.updateCache()
for epObj in episodes: for epObj in episodes:
time.sleep(0.01)
cacheResult = self.cache.searchCache(epObj, manualSearch) cacheResult = self.cache.searchCache(epObj, manualSearch)
if len(cacheResult): if len(cacheResult):
results.update(cacheResult) results.update(cacheResult)
continue continue
if epObj.show.air_by_date: logger.log(
logger.log(u'Searching "%s" for "%s"' % (self.name, epObj.prettyABDName())) u'Searching "%s" for "%s" as "%s"' % (self.name, epObj.prettyName(), epObj.prettySceneName()))
else:
logger.log(
u'Searching "%s" for "%s" as "%s"' % (self.name, epObj.prettyName(), epObj.prettySceneName()))
if seasonSearch: if seasonSearch:
for curString in self._get_season_search_strings(epObj): for curString in self._get_season_search_strings(epObj):
@ -273,7 +270,7 @@ class GenericProvider:
for episode, items in searchItems.items(): for episode, items in searchItems.items():
for item in items: for item in items:
time.sleep(0.01)
(title, url) = self._get_title_and_url(item) (title, url) = self._get_title_and_url(item)
@ -335,7 +332,7 @@ class GenericProvider:
logger.log( logger.log(
u"Ignoring result " + title + " because we don't want an episode that is " + Quality.qualityStrings[ u"Ignoring result " + title + " because we don't want an episode that is " + Quality.qualityStrings[
quality], logger.DEBUG) quality], logger.DEBUG)
time.sleep(0.01)
continue continue
logger.log(u"Found result " + title + " at " + url, logger.DEBUG) logger.log(u"Found result " + title + " at " + url, logger.DEBUG)
@ -351,6 +348,7 @@ class GenericProvider:
result.quality = quality result.quality = quality
result.provider = self result.provider = self
result.content = None result.content = None
result.extraInfo = [show]
if len(epObj) == 1: if len(epObj) == 1:
epNum = epObj[0].episode epNum = epObj[0].episode
@ -361,7 +359,6 @@ class GenericProvider:
parse_result.episode_numbers), logger.DEBUG) parse_result.episode_numbers), logger.DEBUG)
elif len(epObj) == 0: elif len(epObj) == 0:
epNum = SEASON_RESULT epNum = SEASON_RESULT
result.extraInfo = [show]
logger.log(u"Separating full season result to check for later", logger.DEBUG) logger.log(u"Separating full season result to check for later", logger.DEBUG)
if not result: if not result:

View file

@ -207,7 +207,7 @@ class HDBitsCache(tvcache.TVCache):
ql = [] ql = []
for item in items: for item in items:
time.sleep(0.01)
ci = self._parseItem(item) ci = self._parseItem(item)
if ci is not None: if ci is not None:
ql.append(ci) ql.append(ci)

View file

@ -349,7 +349,7 @@ class HDTorrentsCache(tvcache.TVCache):
cl = [] cl = []
for result in rss_results: for result in rss_results:
time.sleep(0.01)
item = (result[0], result[1]) item = (result[0], result[1])
ci = self._parseItem(item) ci = self._parseItem(item)
if ci is not None: if ci is not None:

View file

@ -295,7 +295,7 @@ class IPTorrentsCache(tvcache.TVCache):
cl = [] cl = []
for result in rss_results: for result in rss_results:
time.sleep(0.01)
item = (result[0], result[1]) item = (result[0], result[1])
ci = self._parseItem(item) ci = self._parseItem(item)
if ci is not None: if ci is not None:

View file

@ -424,7 +424,7 @@ class KATCache(tvcache.TVCache):
cl = [] cl = []
for result in rss_results: for result in rss_results:
time.sleep(0.01)
item = (result[0], result[1]) item = (result[0], result[1])
ci = self._parseItem(item) ci = self._parseItem(item)
if ci is not None: if ci is not None:

View file

@ -173,7 +173,7 @@ class NewznabProvider(generic.NZBProvider):
return True return True
def _doSearch(self, search_params, show=None, max_age=0): def _doSearch(self, search_params, epcount=0, age=0):
self._checkAuth() self._checkAuth()
@ -183,8 +183,8 @@ class NewznabProvider(generic.NZBProvider):
"cat": self.catIDs} "cat": self.catIDs}
# if max_age is set, use it, don't allow it to be missing # if max_age is set, use it, don't allow it to be missing
if max_age or not params['maxage']: if age or not params['maxage']:
params['maxage'] = max_age params['maxage'] = age
if search_params: if search_params:
params.update(search_params) params.update(search_params)
@ -229,7 +229,7 @@ class NewznabProvider(generic.NZBProvider):
cache_results] cache_results]
for term in search_terms: for term in search_terms:
for item in self._doSearch({'q': term}, max_age=4): for item in self._doSearch({'q': term}, age=4):
(title, url) = self._get_title_and_url(item) (title, url) = self._get_title_and_url(item)

View file

@ -344,7 +344,7 @@ class NextGenCache(tvcache.TVCache):
cl = [] cl = []
for result in rss_results: for result in rss_results:
time.sleep(0.01)
item = (result[0], result[1]) item = (result[0], result[1])
ci = self._parseItem(item) ci = self._parseItem(item)
if ci is not None: if ci is not None:

View file

@ -316,7 +316,7 @@ class PublicHDCache(tvcache.TVCache):
ql = [] ql = []
for result in rss_results: for result in rss_results:
time.sleep(0.01)
item = (result[0], result[1]) item = (result[0], result[1])
ci = self._parseItem(item) ci = self._parseItem(item)
if ci is not None: if ci is not None:

View file

@ -322,7 +322,7 @@ class SCCCache(tvcache.TVCache):
cl = [] cl = []
for result in rss_results: for result in rss_results:
time.sleep(0.01)
item = (result[0], result[1]) item = (result[0], result[1])
ci = self._parseItem(item) ci = self._parseItem(item)
if ci is not None: if ci is not None:

View file

@ -274,7 +274,7 @@ class SpeedCDCache(tvcache.TVCache):
ql = [] ql = []
for result in rss_results: for result in rss_results:
time.sleep(0.01)
item = (result[0], result[1]) item = (result[0], result[1])
ci = self._parseItem(item) ci = self._parseItem(item)
if ci is not None: if ci is not None:

View file

@ -415,7 +415,7 @@ class ThePirateBayCache(tvcache.TVCache):
cl = [] cl = []
for result in rss_results: for result in rss_results:
time.sleep(0.01)
item = (result[0], result[1]) item = (result[0], result[1])
ci = self._parseItem(item) ci = self._parseItem(item)
if ci is not None: if ci is not None:

View file

@ -296,7 +296,7 @@ class TorrentDayCache(tvcache.TVCache):
cl = [] cl = []
for result in rss_results: for result in rss_results:
time.sleep(0.01)
item = (result[0], result[1]) item = (result[0], result[1])
ci = self._parseItem(item) ci = self._parseItem(item)
if ci is not None: if ci is not None:

View file

@ -295,7 +295,7 @@ class TorrentLeechCache(tvcache.TVCache):
cl = [] cl = []
for result in rss_results: for result in rss_results:
time.sleep(0.01)
item = (result[0], result[1]) item = (result[0], result[1])
ci = self._parseItem(item) ci = self._parseItem(item)
if ci is not None: if ci is not None:

View file

@ -27,7 +27,7 @@ from sickbeard.exceptions import ex
class Scheduler: class Scheduler:
def __init__(self, action, cycleTime=datetime.timedelta(minutes=10), runImmediately=True, def __init__(self, action, cycleTime=datetime.timedelta(minutes=10), runImmediately=True,
threadName="ScheduledThread", silent=False): threadName="ScheduledThread", silent=False, runOnce=False, queue=None):
if runImmediately: if runImmediately:
self.lastRun = datetime.datetime.fromordinal(1) self.lastRun = datetime.datetime.fromordinal(1)
@ -44,6 +44,8 @@ class Scheduler:
self.initThread() self.initThread()
self.abort = False self.abort = False
self.runOnce = runOnce
self.queue = queue
def initThread(self): def initThread(self):
if self.thread == None or not self.thread.isAlive(): if self.thread == None or not self.thread.isAlive():
@ -61,8 +63,7 @@ class Scheduler:
def runAction(self): def runAction(self):
while True: while True:
time.sleep(0.01) time.sleep(1)
currentTime = datetime.datetime.now() currentTime = datetime.datetime.now()
if currentTime - self.lastRun > self.cycleTime: if currentTime - self.lastRun > self.cycleTime:
@ -70,12 +71,17 @@ class Scheduler:
try: try:
if not self.silent: if not self.silent:
logger.log(u"Starting new thread: " + self.threadName, logger.DEBUG) logger.log(u"Starting new thread: " + self.threadName, logger.DEBUG)
self.action.run()
# check if we want to pass in our queue dynamically
if self.queue:
self.action.run(self.queue)
else:
self.action.run()
except Exception, e: except Exception, e:
logger.log(u"Exception generated in thread " + self.threadName + ": " + ex(e), logger.ERROR) logger.log(u"Exception generated in thread " + self.threadName + ": " + ex(e), logger.ERROR)
logger.log(repr(traceback.format_exc()), logger.DEBUG) logger.log(repr(traceback.format_exc()), logger.DEBUG)
if self.abort: if self.abort or self.runOnce:
self.abort = False self.abort = False
self.thread = None self.thread = None
return return

View file

@ -21,6 +21,7 @@ from __future__ import with_statement
import os import os
import re import re
import threading import threading
import Queue
import traceback import traceback
import datetime import datetime
@ -111,11 +112,6 @@ def snatchEpisode(result, endStatus=SNATCHED):
if result is None: return False if result is None: return False
# don't notify when we re-download an episode
for curEpObj in result.episodes:
if curEpObj.status in Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST:
return 2
result.priority = 0 # -1 = low, 0 = normal, 1 = high result.priority = 0 # -1 = low, 0 = normal, 1 = high
if sickbeard.ALLOW_HIGH_PRIORITY: if sickbeard.ALLOW_HIGH_PRIORITY:
# if it aired recently make it high priority # if it aired recently make it high priority
@ -363,8 +359,9 @@ def filterSearchResults(show, results):
return foundResults return foundResults
def searchProviders(show, season, episodes, curProvider, seasonSearch=False, manualSearch=False): def searchProviders(queueItem, show, season, episodes, curProvider, seasonSearch=False, manualSearch=False):
threading.currentThread().name = curProvider.name thread_name = str(curProvider.name).upper() + '-' + str(show.indexerid)
threading.currentThread().name = thread_name
logger.log(u"Searching for stuff we need from " + show.name + " season " + str(season)) logger.log(u"Searching for stuff we need from " + show.name + " season " + str(season))
foundResults = {} foundResults = {}
@ -392,7 +389,7 @@ def searchProviders(show, season, episodes, curProvider, seasonSearch=False, man
curResults = filterSearchResults(show, curResults) curResults = filterSearchResults(show, curResults)
if len(curResults): if len(curResults):
foundResults.update(curResults) foundResults.update(curResults)
logger.log(u"Provider search results: " + str(foundResults), logger.DEBUG) logger.log(u"Provider search results: " + repr(foundResults), logger.DEBUG)
if not len(foundResults): if not len(foundResults):
return [] return []
@ -407,6 +404,7 @@ def searchProviders(show, season, episodes, curProvider, seasonSearch=False, man
highest_quality_overall = 0 highest_quality_overall = 0
for cur_episode in foundResults: for cur_episode in foundResults:
for cur_result in foundResults[cur_episode]: for cur_result in foundResults[cur_episode]:
cur_result.queue_item = queueItem
if cur_result.quality != Quality.UNKNOWN and cur_result.quality > highest_quality_overall: if cur_result.quality != Quality.UNKNOWN and cur_result.quality > highest_quality_overall:
highest_quality_overall = cur_result.quality highest_quality_overall = cur_result.quality
logger.log(u"The highest quality of any match is " + Quality.qualityStrings[highest_quality_overall], logger.DEBUG) logger.log(u"The highest quality of any match is " + Quality.qualityStrings[highest_quality_overall], logger.DEBUG)
@ -574,4 +572,4 @@ def searchProviders(show, season, episodes, curProvider, seasonSearch=False, man
finalResults.append(pickBestResult(foundResults[curEp], show)) finalResults.append(pickBestResult(foundResults[curEp], show))
return finalResults return finalResults

View file

@ -125,7 +125,8 @@ class BacklogSearcher:
backlog_queue_item = search_queue.BacklogQueueItem(curShow, cur_segment) backlog_queue_item = search_queue.BacklogQueueItem(curShow, cur_segment)
if backlog_queue_item.wantedEpisodes: if backlog_queue_item.wantedEpisodes:
sickbeard.searchQueueScheduler.action.add_item(backlog_queue_item) #@UndefinedVariable backlog_queue_item = search_queue.BacklogQueueItem(curShow, cur_segment)
#sickbeard.searchQueueScheduler.action.add_item(backlog_queue_item) #@UndefinedVariable
else: else:
logger.log( logger.log(
u"Nothing in season " + str(cur_segment) + " needs to be downloaded, skipping this season", u"Nothing in season " + str(cur_segment) + " needs to be downloaded, skipping this season",

View file

@ -19,36 +19,114 @@
from __future__ import with_statement from __future__ import with_statement
import datetime import datetime
from threading import Thread import Queue
import threading
import time import time
import traceback
import sickbeard import sickbeard
from sickbeard import db, logger, common, exceptions, helpers from sickbeard import db, logger, common, exceptions, helpers
from sickbeard import generic_queue from sickbeard import generic_queue, scheduler
from sickbeard import search, failed_history, history from sickbeard import search, failed_history, history
from sickbeard import ui from sickbeard import ui
from lib.concurrent import futures from lib.concurrent import futures
from lib.concurrent.futures.thread import ThreadPoolExecutor
BACKLOG_SEARCH = 10 BACKLOG_SEARCH = 10
RSS_SEARCH = 20 RSS_SEARCH = 20
FAILED_SEARCH = 30 FAILED_SEARCH = 30
MANUAL_SEARCH = 30 MANUAL_SEARCH = 30
SNATCH = 40
# snatch queues
ManualSnatchQueue = Queue.PriorityQueue()
RSSSnatchQueue = Queue.PriorityQueue()
BacklogSnatchQueue = Queue.PriorityQueue()
FailedSnatchQueue = Queue.PriorityQueue()
SearchItemQueue = Queue.PriorityQueue()
class SnatchQueue(generic_queue.GenericQueue):
def __init__(self):
generic_queue.GenericQueue.__init__(self)
self.queue_name = "SNATCHQUEUE"
def is_in_queue(self, show, episodes, quality):
for cur_item in self.queue.queue:
if cur_item.results.extraInfo[0] == show \
and cur_item.results.episodes.sort() == episodes.sort() \
and cur_item.results.quality >= quality:
return True
return False
def add_item(self, item):
# dynamically select our snatch queue
if item.type == 'RSSSearchQueueItem':
self.queue = RSSSnatchQueue
elif item.type == 'ManualSearchQueueItem':
self.queue = ManualSnatchQueue
elif item.type == 'BacklogQueueItem':
self.queue = BacklogSnatchQueue
elif item.type == 'FailedQueueItem':
self.queue = FailedSnatchQueue
else:
return
# check if we already have a item ready to snatch with same or better quality score
if not self.is_in_queue(item.results.extraInfo[0], item.results.episodes, item.results.quality):
generic_queue.GenericQueue.add_item(self, item)
else:
logger.log(
u"Not adding item [" + item.results.name + "] it's already in the queue with same or higher quality",
logger.DEBUG)
class SnatchQueueItem(generic_queue.QueueItem):
def __init__(self, results, queue_item):
generic_queue.QueueItem.__init__(self, 'Snatch', SNATCH)
self.priority = generic_queue.QueuePriorities.HIGH
self.thread_name = 'SNATCH-' + str(results.extraInfo[0].indexerid)
self.results = results
self.success = None
self.queue_item = queue_item
self.type = queue_item.type
def execute(self):
generic_queue.QueueItem.execute(self)
# just use the first result for now
logger.log(u"Downloading " + self.results.name + " from " + self.results.provider.name)
result = search.snatchEpisode(self.results)
if self.type == "ManualSearchQueueItem":
providerModule = self.results.provider
if not result:
ui.notifications.error(
'Error while attempting to snatch ' + self.results.name + ', check your logs')
elif providerModule == None:
ui.notifications.error('Provider is configured incorrectly, unable to download')
self.success = result
self.queue_item.success = result
generic_queue.QueueItem.finish(self.queue_item)
generic_queue.QueueItem.finish(self)
class SearchQueue(generic_queue.GenericQueue): class SearchQueue(generic_queue.GenericQueue):
def __init__(self): def __init__(self):
generic_queue.GenericQueue.__init__(self) generic_queue.GenericQueue.__init__(self)
self.queue_name = "SEARCHQUEUE" self.queue_name = "SEARCHQUEUE"
self.queue = SearchItemQueue
def is_in_queue(self, show, segment): def is_in_queue(self, show, segment):
for cur_item in self.queue: for cur_item in self.queue.queue:
if isinstance(cur_item, BacklogQueueItem) and cur_item.show == show and cur_item.segment == segment: if isinstance(cur_item, BacklogQueueItem) and cur_item.show == show and cur_item.segment == segment:
return True return True
return False return False
def is_ep_in_queue(self, ep_obj): def is_ep_in_queue(self, ep_obj):
for cur_item in self.queue: for cur_item in self.queue.queue:
if isinstance(cur_item, ManualSearchQueueItem) and cur_item.ep_obj == ep_obj: if isinstance(cur_item, ManualSearchQueueItem) and cur_item.ep_obj == ep_obj:
return True return True
return False return False
@ -70,6 +148,7 @@ class SearchQueue(generic_queue.GenericQueue):
return False return False
def add_item(self, item): def add_item(self, item):
if isinstance(item, RSSSearchQueueItem): if isinstance(item, RSSSearchQueueItem):
generic_queue.GenericQueue.add_item(self, item) generic_queue.GenericQueue.add_item(self, item)
elif isinstance(item, BacklogQueueItem) and not self.is_in_queue(item.show, item.segment): elif isinstance(item, BacklogQueueItem) and not self.is_in_queue(item.show, item.segment):
@ -86,124 +165,85 @@ class ManualSearchQueueItem(generic_queue.QueueItem):
def __init__(self, ep_obj): def __init__(self, ep_obj):
generic_queue.QueueItem.__init__(self, 'Manual Search', MANUAL_SEARCH) generic_queue.QueueItem.__init__(self, 'Manual Search', MANUAL_SEARCH)
self.priority = generic_queue.QueuePriorities.HIGH self.priority = generic_queue.QueuePriorities.HIGH
self.ep_obj = ep_obj self.type = self.__class__.__name__
self.thread_name = 'MANUAL-' + str(ep_obj.show.indexerid)
self.success = None self.success = None
self.show = ep_obj.show
self.ep_obj = ep_obj
def execute(self): def execute(self):
generic_queue.QueueItem.execute(self) generic_queue.QueueItem.execute(self)
foundResults = [] fs = []
didSearch = False didSearch = False
providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive()] providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive()]
try: try:
with futures.ThreadPoolExecutor(sickbeard.NUM_OF_THREADS) as executor: with ThreadPoolExecutor(sickbeard.NUM_OF_THREADS) as executor:
foundResults = list( for provider in providers:
executor.map(self.process, providers)) didSearch = True
didSearch = True logger.log("Beginning manual search for [" + self.ep_obj.prettyName() + "] on " + provider.name)
executor.submit(
search.searchProviders, self, self.show, self.ep_obj.season, [self.ep_obj], provider, False,
True).add_done_callback(snatch_results)
executor.shutdown(wait=True)
except Exception, e: except Exception, e:
pass logger.log(traceback.format_exc(), logger.DEBUG)
if not didSearch: if not didSearch:
logger.log( logger.log(
u"No NZB/Torrent providers found or enabled in your SickRage config. Please check your settings.", u"No NZB/Torrent providers found or enabled in your SickRage config. Please check your settings.",
logger.ERROR) logger.ERROR)
result = False if ManualSnatchQueue.empty():
if not len(foundResults): ui.notifications.message('No downloads were found',
if self.ep_obj.show.air_by_date: "Couldn't find a download for <i>%s</i>" % self.ep_obj.prettyName())
ui.notifications.message('No downloads were found ...', logger.log(u"Unable to find a download for " + self.ep_obj.prettyName())
"Couldn't find a download for <i>%s</i>" % self.ep_obj.prettyABName())
logger.log(u"Unable to find a download for " + self.ep_obj.prettyABDName())
else:
ui.notifications.message('No downloads were found ...',
"Couldn't find a download for <i>%s</i>" % self.ep_obj.prettyName())
logger.log(u"Unable to find a download for " + self.ep_obj.prettyName())
self.success = result
else: else:
for foundResult in [item for sublist in foundResults for item in sublist]: # snatch all items in queue
time.sleep(0.01) scheduler.Scheduler(SnatchQueue(), silent=True, runOnce=True, queue=ManualSnatchQueue).thread.start()
result = search.snatchEpisode(foundResult) generic_queue.QueueItem.finish(self)
# duplicate snatch detected due to multithreading
if result == 2:
continue
providerModule = foundResult.provider
if not result:
ui.notifications.error(
'Error while attempting to snatch ' + foundResult.name + ', check your logs')
elif providerModule == None:
ui.notifications.error('Provider is configured incorrectly, unable to download')
# just use the first result for now
logger.log(u"Downloading " + foundResult.name + " from " + foundResult.provider.name)
self.success = result
def process(self, curProvider):
if self.ep_obj.show.air_by_date:
logger.log("Beginning manual search for " + self.ep_obj.prettyABDName())
else:
logger.log("Beginning manual search for " + self.ep_obj.prettyName())
return search.searchProviders(self.ep_obj.show, self.ep_obj.season, [self.ep_obj], curProvider, False, True)
def finish(self):
# don't let this linger if something goes wrong
if self.success == None:
self.success = False
else:
generic_queue.QueueItem.finish(self)
class RSSSearchQueueItem(generic_queue.QueueItem): class RSSSearchQueueItem(generic_queue.QueueItem):
def __init__(self): def __init__(self):
generic_queue.QueueItem.__init__(self, 'RSS Search', RSS_SEARCH) generic_queue.QueueItem.__init__(self, 'RSS Search', RSS_SEARCH)
self.thread_name = 'RSSFEED'
self.type = self.__class__.__name__
def execute(self): def execute(self):
generic_queue.QueueItem.execute(self) generic_queue.QueueItem.execute(self)
foundResults = [] results = False
didSearch = False didSearch = False
self._changeMissingEpisodes()
providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive()] providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive()]
try: try:
with futures.ThreadPoolExecutor(sickbeard.NUM_OF_THREADS) as executor: with ThreadPoolExecutor(sickbeard.NUM_OF_THREADS) as executor:
foundResults = list( for provider in providers:
executor.map(self.process, providers)) didSearch = True
logger.log("Beginning RSS Feed search on " + provider.name)
didSearch = True executor.submit(search.searchForNeededEpisodes, provider).add_done_callback(snatch_results)
executor.shutdown(wait=True)
except: except:
pass logger.log(traceback.format_exc(), logger.DEBUG)
if not didSearch: if not didSearch:
logger.log( logger.log(
u"No NZB/Torrent providers found or enabled in the sickbeard config. Please check your settings.", u"No NZB/Torrent providers found or enabled in your SickRage config. Please check your settings.",
logger.ERROR) logger.ERROR)
if len(foundResults): if RSSSnatchQueue.empty():
for curResult in [item for sublist in foundResults for item in sublist]: logger.log(u"No needed episodes found on the RSS feeds")
time.sleep(0.01)
result = search.snatchEpisode(curResult)
# duplicate snatch detected due to multithreading
if result == 2:
continue
else: else:
logger.log(u"RSS Feed search found nothing to snatch ...") # snatch all items in queue
scheduler.Scheduler(SnatchQueue(), silent=True, runOnce=True, queue=RSSSnatchQueue).thread.start()
generic_queue.QueueItem.finish(self) generic_queue.QueueItem.finish(self)
def process(self, curProvider):
self._changeMissingEpisodes()
logger.log(u"Beginning search for new episodes on RSS feeds and in cache")
return search.searchForNeededEpisodes(curProvider)
def _changeMissingEpisodes(self): def _changeMissingEpisodes(self):
logger.log(u"Changing all old missing episodes to status WANTED") logger.log(u"Changing all old missing episodes to status WANTED")
@ -240,6 +280,7 @@ class BacklogQueueItem(generic_queue.QueueItem):
def __init__(self, show, segment): def __init__(self, show, segment):
generic_queue.QueueItem.__init__(self, 'Backlog', BACKLOG_SEARCH) generic_queue.QueueItem.__init__(self, 'Backlog', BACKLOG_SEARCH)
self.priority = generic_queue.QueuePriorities.LOW self.priority = generic_queue.QueuePriorities.LOW
self.type = self.__class__.__name__
self.thread_name = 'BACKLOG-' + str(show.indexerid) self.thread_name = 'BACKLOG-' + str(show.indexerid)
self.show = show self.show = show
@ -274,53 +315,48 @@ class BacklogQueueItem(generic_queue.QueueItem):
def execute(self): def execute(self):
generic_queue.QueueItem.execute(self) generic_queue.QueueItem.execute(self)
foundResults = [] results = False
didSearch = False didSearch = False
providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive()]
try:
with futures.ThreadPoolExecutor(sickbeard.NUM_OF_THREADS) as executor:
foundResults = list(executor.map(self.process,providers))
didSearch = True
except:
pass
if not didSearch:
logger.log(
u"No NZB/Torrent providers found or enabled in the sickbeard config. Please check your settings.",
logger.ERROR)
if len(foundResults):
for curResult in [item for sublist in foundResults for item in sublist]:
time.sleep(0.01)
result = search.snatchEpisode(curResult)
# duplicate snatch detected due to multithreading
if result == 2:
continue
else:
logger.log(u"Backlog search found nothing to snatch ...")
self.finish()
def process(self, curProvider):
# check if we want to search for season packs instead of just season/episode # check if we want to search for season packs instead of just season/episode
seasonSearch = False seasonSearch = False
seasonEps = self.show.getAllEpisodes(self.segment) seasonEps = self.show.getAllEpisodes(self.segment)
if len(seasonEps) == len(self.wantedEpisodes): if len(seasonEps) == len(self.wantedEpisodes):
seasonSearch = True seasonSearch = True
return search.searchProviders(self.show, self.segment, self.wantedEpisodes, curProvider, seasonSearch, False) providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive()]
try:
with ThreadPoolExecutor(sickbeard.NUM_OF_THREADS) as executor:
for provider in providers:
didSearch = True
logger.log("Beginning backlog search for [" + self.segment + "] on " + provider.name)
executor.submit(
search.searchProviders, self, self.show, self.segment, self.wantedEpisodes, provider,
seasonSearch, False).add_done_callback(snatch_results)
executor.shutdown(wait=True)
except Exception, e:
logger.log(traceback.format_exc(), logger.DEBUG)
if not didSearch:
logger.log(
u"No NZB/Torrent providers found or enabled in your SickRage config. Please check your settings.",
logger.ERROR)
if BacklogSnatchQueue.empty():
logger.log(u"No needed episodes found during backlog search")
else:
# snatch all items in queue
scheduler.Scheduler(SnatchQueue(), silent=True, runOnce=True, queue=BacklogSnatchQueue).thread.start()
self.finish()
def _need_any_episodes(self, statusResults, bestQualities): def _need_any_episodes(self, statusResults, bestQualities):
wantedEpisodes = [] wantedEpisodes = []
# check through the list of statuses to see if we want any # check through the list of statuses to see if we want any
for curStatusResult in statusResults: for curStatusResult in statusResults:
time.sleep(0.01) time.sleep(1)
curCompositeStatus = int(curStatusResult["status"]) curCompositeStatus = int(curStatusResult["status"])
curStatus, curQuality = common.Quality.splitCompositeStatus(curCompositeStatus) curStatus, curQuality = common.Quality.splitCompositeStatus(curCompositeStatus)
@ -344,61 +380,24 @@ class FailedQueueItem(generic_queue.QueueItem):
def __init__(self, show, episodes): def __init__(self, show, episodes):
generic_queue.QueueItem.__init__(self, 'Retry', FAILED_SEARCH) generic_queue.QueueItem.__init__(self, 'Retry', FAILED_SEARCH)
self.priority = generic_queue.QueuePriorities.HIGH self.priority = generic_queue.QueuePriorities.HIGH
self.type = self.__class__.__name__
self.thread_name = 'RETRY-' + str(show.indexerid) self.thread_name = 'RETRY-' + str(show.indexerid)
self.show = show self.show = show
self.episodes = episodes self.episodes = episodes
self.success = None self.success = None
def execute(self): def execute(self):
generic_queue.QueueItem.execute(self) generic_queue.QueueItem.execute(self)
foundResults = [] results = False
didSearch = False didSearch = False
providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive()]
try:
with futures.ThreadPoolExecutor(sickbeard.NUM_OF_THREADS) as executor:
foundResults = list(
executor.map(self.process, providers))
didSearch = True
except:
pass
if not didSearch:
logger.log(
u"No NZB/Torrent providers found or enabled in the sickbeard config. Please check your settings.",
logger.ERROR)
if len(foundResults):
for curResult in [item for sublist in foundResults for item in sublist]:
time.sleep(0.01)
result = search.snatchEpisode(curResult)
# duplicate snatch detected due to multithreading
if result == 2:
continue
self.success = result
else:
logger.log(u"Retry failed download search found nothing to snatch ...")
self.finish()
def process(self, curProvider):
episodes = [] episodes = []
for i, epObj in enumerate(episodes): for i, epObj in enumerate(episodes):
time.sleep(0.01) time.sleep(1)
logger.log(
if epObj.show.air_by_date: "Beginning failed download search for " + epObj.prettyName())
logger.log("Beginning manual search for " + epObj.prettyABDName())
else:
logger.log(
"Beginning failed download search for " + epObj.prettyName())
(release, provider) = failed_history.findRelease(self.show, epObj.season, epObj.episode) (release, provider) = failed_history.findRelease(self.show, epObj.season, epObj.episode)
if release: if release:
@ -410,4 +409,36 @@ class FailedQueueItem(generic_queue.QueueItem):
failed_history.revertEpisode(self.show, epObj.season, epObj.episode) failed_history.revertEpisode(self.show, epObj.season, epObj.episode)
episodes.append(epObj) episodes.append(epObj)
return search.searchProviders(self.show, self.episodes[0].season, self.episodes, curProvider, False, False) providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive()]
try:
with ThreadPoolExecutor(sickbeard.NUM_OF_THREADS) as executor:
for provider in providers:
didSearch = True
executor.submit(
search.searchProviders, self, self.show, self.episodes[0].season, self.episodes, provider,
False,
True).add_done_callback(snatch_results)
executor.shutdown(wait=True)
except Exception, e:
logger.log(traceback.format_exc(), logger.DEBUG)
if not didSearch:
logger.log(
u"No NZB/Torrent providers found or enabled in your SickRage config. Please check your settings.",
logger.ERROR)
if FailedSnatchQueue.empty():
logger.log(u"No needed episodes found on the RSS feeds")
else:
# snatch all items in queue
scheduler.Scheduler(SnatchQueue(), silent=True, runOnce=True, queue=FailedSnatchQueue).thread.start()
self.finish()
# send to snatch queue
def snatch_results(f):
for result in f.result():
snatch_queue_item = SnatchQueueItem(result, result.queue_item)
SnatchQueue().add_item(snatch_queue_item)

View file

@ -19,6 +19,7 @@
from __future__ import with_statement from __future__ import with_statement
import traceback import traceback
import Queue
import sickbeard import sickbeard
@ -30,16 +31,18 @@ from sickbeard import generic_queue
from sickbeard import name_cache from sickbeard import name_cache
from sickbeard.exceptions import ex from sickbeard.exceptions import ex
ShowItemQueue = Queue.PriorityQueue()
class ShowQueue(generic_queue.GenericQueue): class ShowQueue(generic_queue.GenericQueue):
def __init__(self): def __init__(self):
generic_queue.GenericQueue.__init__(self) generic_queue.GenericQueue.__init__(self)
self.queue_name = "SHOWQUEUE" self.queue_name = "SHOWQUEUE"
self.queue = ShowItemQueue
def _isInQueue(self, show, actions): def _isInQueue(self, show, actions):
return show in [x.show for x in self.queue if x.action_id in actions] return show in [x.show for x in self.queue.queue if x.action_id in actions] if self.queue.qsize() > 0 else []
def _isBeingSomethinged(self, show, actions): def _isBeingSomethinged(self, show, actions):
return self.currentItem != None and show == self.currentItem.show and \ return self.currentItem != None and show == self.currentItem.show and \
@ -73,7 +76,7 @@ class ShowQueue(generic_queue.GenericQueue):
return self._isBeingSomethinged(show, (ShowQueueActions.SUBTITLE,)) return self._isBeingSomethinged(show, (ShowQueueActions.SUBTITLE,))
def _getLoadingShowList(self): def _getLoadingShowList(self):
return [x for x in self.queue + [self.currentItem] if x != None and x.isLoading] return [x for x in self.queue.queue + [self.currentItem] if x != None and x.isLoading] if self.queue.qsize() > 0 else []
loadingShowList = property(_getLoadingShowList) loadingShowList = property(_getLoadingShowList)

View file

@ -441,7 +441,6 @@ class TVShow(object):
sql_l = [] sql_l = []
for season in showObj: for season in showObj:
time.sleep(0.01)
scannedEps[season] = {} scannedEps[season] = {}
for episode in showObj[season]: for episode in showObj[season]:
# need some examples of wtf episode 0 means to decide if we want it or not # need some examples of wtf episode 0 means to decide if we want it or not
@ -1732,18 +1731,10 @@ class TVEpisode(object):
Returns: A string representing the episode's name and season/ep numbers Returns: A string representing the episode's name and season/ep numbers
""" """
if self.show.air_by_date:
return self._format_pattern('%SN - %Sx%0E - %EN') return self._format_pattern('%SN - %AD - %EN')
else:
def prettyABDName(self): return self._format_pattern('%SN - %Sx%0E - %EN')
"""
Returns the name of this episode in a "pretty" human-readable format. Used for logging
and notifications and such.
Returns: A string representing the episode's name and season/ep numbers
"""
return self._format_pattern('%SN - %AD - %EN')
def prettySceneName(self): def prettySceneName(self):
""" """
@ -1752,8 +1743,10 @@ class TVEpisode(object):
Returns: A string representing the episode's name and season/ep numbers Returns: A string representing the episode's name and season/ep numbers
""" """
if self.show.air_by_date:
return self._format_pattern('%SN - %XSx%0XE - %EN') return self._format_pattern('%SN - %AD - %EN')
else:
return self._format_pattern('%SN - %XSx%0XE - %EN')
def _ep_name(self): def _ep_name(self):
""" """
@ -1851,13 +1844,13 @@ class TVEpisode(object):
'%Q.N': dot(Quality.qualityStrings[epQual]), '%Q.N': dot(Quality.qualityStrings[epQual]),
'%Q_N': us(Quality.qualityStrings[epQual]), '%Q_N': us(Quality.qualityStrings[epQual]),
'%S': str(self.season), '%S': str(self.season),
'%0S': '%02d' % int(self.season) if not self.show.air_by_date else self.season, '%0S': '%02d' % self.season,
'%E': str(self.episode), '%E': str(self.episode),
'%0E': '%02d' % int(self.episode)if not self.show.air_by_date else self.episode, '%0E': '%02d' % self.episode,
'%XS': str(self.scene_season), '%XS': str(self.scene_season),
'%0XS': '%02d' % int(self.scene_season), '%0XS': '%02d' % self.scene_season,
'%XE': str(self.scene_episode), '%XE': str(self.scene_episode),
'%0XE': '%02d' % int(self.scene_episode), '%0XE': '%02d' % self.scene_episode,
'%RN': release_name(self.release_name), '%RN': release_name(self.release_name),
'%RG': release_group(self.release_name), '%RG': release_group(self.release_name),
'%AD': str(self.airdate).replace('-', ' '), '%AD': str(self.airdate).replace('-', ' '),

View file

@ -143,7 +143,6 @@ class TVCache():
items = data.entries items = data.entries
ql = [] ql = []
for item in items: for item in items:
time.sleep(0.01)
qi = self._parseItem(item) qi = self._parseItem(item)
if qi is not None: if qi is not None:
ql.append(qi) ql.append(qi)

View file

@ -3153,7 +3153,6 @@ class Home:
sql_l = [] sql_l = []
for curEp in eps.split('|'): for curEp in eps.split('|'):
time.sleep(0.01)
logger.log(u"Attempting to set status on episode " + curEp + " to " + status, logger.DEBUG) logger.log(u"Attempting to set status on episode " + curEp + " to " + status, logger.DEBUG)