Change to simplify and reduce logging output of Recent-Search and Backlog processes.
parent d5fb963b73
commit e6b7feef3d

6 changed files with 97 additions and 60 deletions
@@ -33,6 +33,7 @@
 * Change to no longer require restart with the "Scan and post process" option on page config/Post Processing
 * Add validation when using Release Group token on page config Post Processing/Episode Naming/Name pattern/Custom
 * Change IMDb updater to a toggleable option under General Settings (disabled by default due to slow operations)
+* Change to simplify and reduce logging output of Recent-Search and Backlog processes
 
 [develop changelog]
 * Fix traceback error when using the menu item Manage/Update Kodi

@@ -1520,6 +1520,11 @@ def get_size(start_path='.'):
 def remove_article(text=''):
     return re.sub(r'(?i)^(?:(?:A(?!\s+to)n?)|The)\s(\w)', r'\1', text)
 
 
+def maybe_plural(number=1):
+    return ('s', '')[1 == number]
+
+
 def build_dict(seq, key):
     return dict((d[key], dict(d, index=index)) for (index, d) in enumerate(seq))
 
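The new helpers.maybe_plural() above is what the reworked log lines lean on: it returns an 's' suffix for any count other than one, so a single format string reads correctly for both singular and plural totals. A quick standalone sketch of the behaviour (the demo loop is illustrative, not part of the commit):

    # the helper as added above, plus a tiny demo
    def maybe_plural(number=1):
        # indexing a 2-tuple with a boolean: True (1 == number) selects '', False selects 's'
        return ('s', '')[1 == number]

    for count in (0, 1, 2):
        print('%d episode%s' % (count, maybe_plural(count)))
        # -> 0 episodes / 1 episode / 2 episodes
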
@@ -310,6 +310,7 @@ def isFirstBestMatch(result):
 
     return False
 
 
 def wantedEpisodes(show, fromDate):
     anyQualities, bestQualities = common.Quality.splitQuality(show.quality)  # @UnusedVariable
+    allQualities = list(set(anyQualities + bestQualities))

@@ -327,6 +328,7 @@ def wantedEpisodes(show, fromDate):
 
     # check through the list of statuses to see if we want any
     wanted = []
+    total_wanted = total_replacing = 0
     for result in sqlResults:
         curCompositeStatus = int(result["status"])
         curStatus, curQuality = common.Quality.splitCompositeStatus(curCompositeStatus)

@@ -340,14 +342,25 @@ def wantedEpisodes(show, fromDate):
         if (curStatus in (common.DOWNLOADED, common.SNATCHED, common.SNATCHED_PROPER,
                           common.SNATCHED_BEST) and curQuality < highestBestQuality) or curStatus == common.WANTED:
 
+            if curStatus == common.WANTED:
+                total_wanted += 1
+            else:
+                total_replacing += 1
+
             epObj = show.getEpisode(int(result["season"]), int(result["episode"]))
+            epObj.wantedQuality = [i for i in allQualities if (i > curQuality and i != common.Quality.UNKNOWN)]
             wanted.append(epObj)
 
-    logger.log(u'We want %d episode(s) of %s' % (len(wanted), show.name))
+    if 0 < total_wanted + total_replacing:
+        actions = []
+        for msg, total in ['%d episode%s', total_wanted], ['to upgrade %d episode%s', total_replacing]:
+            if 0 < total:
+                actions.append(msg % (total, helpers.maybe_plural(total)))
+        logger.log(u'We want %s for %s' % (' and '.join(actions), show.name))
 
     return wanted
 
 
 def searchForNeededEpisodes(episodes):
     foundResults = {}
 
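The rewritten summary above folds what used to be an unconditional per-show line into one message that is only emitted when something is actually wanted, and that distinguishes newly wanted episodes from quality upgrades. A sketch of just that string-building step, with made-up totals and a hypothetical summarise() wrapper (not a SickGear function):

    def maybe_plural(number=1):
        return ('s', '')[1 == number]

    def summarise(show_name, total_wanted, total_replacing):
        # mirrors the actions-list construction in wantedEpisodes(); returns None when nothing is wanted
        if 0 < total_wanted + total_replacing:
            actions = []
            for msg, total in ['%d episode%s', total_wanted], ['to upgrade %d episode%s', total_replacing]:
                if 0 < total:
                    actions.append(msg % (total, maybe_plural(total)))
            return u'We want %s for %s' % (' and '.join(actions), show_name)

    print(summarise('Example Show', 3, 1))
    # -> We want 3 episodes and to upgrade 1 episode for Example Show
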
@@ -23,7 +23,7 @@ import threading
 
 import sickbeard
 
-from sickbeard import db, scheduler
+from sickbeard import db, scheduler, helpers
 from sickbeard import search_queue
 from sickbeard import logger
 from sickbeard import ui

@@ -107,7 +107,7 @@ class BacklogSearcher:
                 backlog_queue_item = search_queue.BacklogQueueItem(curShow, segment)
                 sickbeard.searchQueueScheduler.action.add_item(backlog_queue_item)  # @UndefinedVariable
             else:
-                logger.log(u"Nothing needs to be downloaded for " + str(curShow.name) + ", skipping",logger.DEBUG)
+                logger.log(u'Nothing needs to be downloaded for %s, skipping' % str(curShow.name), logger.DEBUG)
 
         # don't consider this an actual backlog search if we only did recent eps
         # or if we only did certain shows

@@ -139,8 +139,6 @@ class BacklogSearcher:
     def _get_segments(self, show, fromDate):
         anyQualities, bestQualities = common.Quality.splitQuality(show.quality)  # @UnusedVariable
 
-        logger.log(u"Seeing if we need anything from " + show.name)
-
         myDB = db.DBConnection()
         if show.air_by_date:
             sqlResults = myDB.select(

@@ -153,6 +151,7 @@ class BacklogSearcher:
 
         # check through the list of statuses to see if we want any
         wanted = {}
+        total_wanted = total_replacing = 0
         for result in sqlResults:
             curCompositeStatus = int(result["status"])
             curStatus, curQuality = common.Quality.splitCompositeStatus(curCompositeStatus)

@@ -166,12 +165,24 @@ class BacklogSearcher:
             if (curStatus in (common.DOWNLOADED, common.SNATCHED, common.SNATCHED_PROPER,
                               common.SNATCHED_BEST) and curQuality < highestBestQuality) or curStatus == common.WANTED:
 
+                if curStatus == common.WANTED:
+                    total_wanted += 1
+                else:
+                    total_replacing += 1
+
                 epObj = show.getEpisode(int(result["season"]), int(result["episode"]))
                 if epObj.season not in wanted:
                     wanted[epObj.season] = [epObj]
                 else:
                     wanted[epObj.season].append(epObj)
 
+        if 0 < total_wanted + total_replacing:
+            actions = []
+            for msg, total in ['%d episode%s', total_wanted], ['to upgrade %d episode%s', total_replacing]:
+                if 0 < total:
+                    actions.append(msg % (total, helpers.maybe_plural(total)))
+            logger.log(u'We want %s for %s' % (' and '.join(actions), show.name))
+
         return wanted
 
     def _set_lastBacklog(self, when):

@@ -186,7 +197,6 @@ class BacklogSearcher:
         else:
            myDB.action("UPDATE info SET last_backlog=" + str(when))
 
-
     def run(self):
         try:
             self.searchBacklog()
 
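Note that _get_segments() returns a dict keyed by season, each value a list of episode objects, which the searchBacklog() loop above turns into a BacklogQueueItem per segment. A minimal sketch of that grouping pattern, with stand-in (season, episode) tuples instead of real episode objects:

    wanted = {}
    for season, episode in [(1, 3), (1, 4), (2, 1)]:
        if season not in wanted:
            wanted[season] = [episode]
        else:
            wanted[season].append(episode)

    print(wanted)
    # -> {1: [3, 4], 2: [1]}
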
@@ -153,18 +153,21 @@ class RecentSearchQueueItem(generic_queue.QueueItem):
                 logger.log(u'No search of cache for episodes required')
                 self.success = True
             else:
-                logger.log(u'Found a total of %d episode(s) requiring searching' % len(self.episodes))
+                num_shows = len(set([ep.show.name for ep in self.episodes]))
+                logger.log(u'Found %d needed episode%s spanning %d show%s'
+                           % (len(self.episodes), helpers.maybe_plural(len(self.episodes)),
+                              num_shows, helpers.maybe_plural(num_shows)))
 
                 try:
                     logger.log(u'Beginning recent search for episodes')
-                    foundResults = search.searchForNeededEpisodes(self.episodes)
+                    found_results = search.searchForNeededEpisodes(self.episodes)
 
-                    if not len(foundResults):
+                    if not len(found_results):
                         logger.log(u'No needed episodes found')
                     else:
-                        for result in foundResults:
+                        for result in found_results:
                             # just use the first result for now
-                            logger.log(u'Downloading ' + result.name + ' from ' + result.provider.name)
+                            logger.log(u'Downloading %s from %s' % (result.name, result.provider.name))
                             self.success = search.snatchEpisode(result)
 
                             # give the CPU a break

@@ -195,7 +198,7 @@ class RecentSearchQueueItem(generic_queue.QueueItem):
             [common.UNAIRED, curDate])
 
         sql_l = []
-        show = None
+        wanted = show = None
 
         for sqlEp in sqlResults:
             try:

@@ -212,27 +215,29 @@ class RecentSearchQueueItem(generic_queue.QueueItem):
 
             try:
                 end_time = network_timezones.parse_date_time(sqlEp['airdate'], show.airs, show.network) + datetime.timedelta(minutes=helpers.tryInt(show.runtime, 60))
-                # filter out any episodes that haven't aried yet
+                # filter out any episodes that haven't aired yet
                 if end_time > curTime:
                     continue
             except:
-                # if an error occured assume the episode hasn't aired yet
+                # if an error occurred assume the episode hasn't aired yet
                 continue
 
             ep = show.getEpisode(int(sqlEp['season']), int(sqlEp['episode']))
             with ep.lock:
-                if ep.show.paused:
-                    ep.status = common.SKIPPED
-                else:
-                    ep.status = common.WANTED
-
-                sql_l.append(ep.get_sql())
+                # Now that it is time, change state of UNAIRED show into expected or skipped
+                ep.status = (common.WANTED, common.SKIPPED)[ep.show.paused]
+                result = ep.get_sql()
+                if None is not result:
+                    sql_l.append(ep.get_sql())
+                wanted |= (False, True)[common.WANTED == ep.status]
         else:
-            logger.log(u'No new released episodes found ...')
+            logger.log(u'No unaired episodes marked wanted')
 
-        if len(sql_l) > 0:
+        if 0 < len(sql_l):
             myDB = db.DBConnection()
             myDB.mass_action(sql_l)
+        if wanted:
+            logger.log(u'Found new episodes marked wanted')
 
     @staticmethod
     def update_providers():

@@ -253,6 +258,8 @@ class RecentSearchQueueItem(generic_queue.QueueItem):
         for t in threads:
             t.join()
 
+        logger.log('Finished updating provider caches')
+
 
 class ManualSearchQueueItem(generic_queue.QueueItem):
     def __init__(self, show, segment):
 
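The reworked recent-search message above counts distinct shows by building a set of episode.show.name values before logging one combined line. A small self-contained sketch using namedtuple stand-ins for the show and episode objects (SickGear's real classes are richer than this):

    from collections import namedtuple

    Show = namedtuple('Show', 'name')
    Episode = namedtuple('Episode', 'show')

    def maybe_plural(number=1):
        return ('s', '')[1 == number]

    episodes = [Episode(Show('Show A')), Episode(Show('Show A')), Episode(Show('Show B'))]
    num_shows = len(set([ep.show.name for ep in episodes]))
    print(u'Found %d needed episode%s spanning %d show%s'
          % (len(episodes), maybe_plural(len(episodes)), num_shows, maybe_plural(num_shows)))
    # -> Found 3 needed episodes spanning 2 shows
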
@@ -745,103 +745,104 @@ class TVShow(object):
 
     def loadFromDB(self, skipNFO=False):
 
-        logger.log(str(self.indexerid) + u": Loading show info from database")
-
         myDB = db.DBConnection()
         sqlResults = myDB.select("SELECT * FROM tv_shows WHERE indexer_id = ?", [self.indexerid])
 
         if len(sqlResults) > 1:
+            logger.log(str(self.indexerid) + u': Loading show info from database')
             raise exceptions.MultipleDBShowsException()
         elif len(sqlResults) == 0:
-            logger.log(str(self.indexerid) + ": Unable to find the show in the database")
+            logger.log(str(self.indexerid) + ': Unable to find the show in the database')
             return
         else:
             if not self.indexer:
-                self.indexer = int(sqlResults[0]["indexer"])
+                self.indexer = int(sqlResults[0]['indexer'])
             if not self.name:
-                self.name = sqlResults[0]["show_name"]
+                self.name = sqlResults[0]['show_name']
             if not self.network:
-                self.network = sqlResults[0]["network"]
+                self.network = sqlResults[0]['network']
             if not self.genre:
-                self.genre = sqlResults[0]["genre"]
+                self.genre = sqlResults[0]['genre']
             if self.classification is None:
-                self.classification = sqlResults[0]["classification"]
+                self.classification = sqlResults[0]['classification']
 
-            self.runtime = sqlResults[0]["runtime"]
+            self.runtime = sqlResults[0]['runtime']
 
-            self.status = sqlResults[0]["status"]
+            self.status = sqlResults[0]['status']
             if not self.status:
-                self.status = ""
-            self.airs = sqlResults[0]["airs"]
+                self.status = ''
+            self.airs = sqlResults[0]['airs']
             if not self.airs:
-                self.airs = ""
-            self.startyear = sqlResults[0]["startyear"]
+                self.airs = ''
+            self.startyear = sqlResults[0]['startyear']
             if not self.startyear:
                 self.startyear = 0
 
-            self.air_by_date = sqlResults[0]["air_by_date"]
+            self.air_by_date = sqlResults[0]['air_by_date']
             if not self.air_by_date:
                 self.air_by_date = 0
 
-            self.anime = sqlResults[0]["anime"]
-            if self.anime == None:
+            self.anime = sqlResults[0]['anime']
+            if None is self.anime:
                 self.anime = 0
 
-            self.sports = sqlResults[0]["sports"]
+            self.sports = sqlResults[0]['sports']
             if not self.sports:
                 self.sports = 0
 
-            self.scene = sqlResults[0]["scene"]
+            self.scene = sqlResults[0]['scene']
             if not self.scene:
                 self.scene = 0
 
-            self.subtitles = sqlResults[0]["subtitles"]
+            self.subtitles = sqlResults[0]['subtitles']
             if self.subtitles:
                 self.subtitles = 1
             else:
                 self.subtitles = 0
 
-            self.dvdorder = sqlResults[0]["dvdorder"]
+            self.dvdorder = sqlResults[0]['dvdorder']
             if not self.dvdorder:
                 self.dvdorder = 0
 
-            self.archive_firstmatch = sqlResults[0]["archive_firstmatch"]
+            self.archive_firstmatch = sqlResults[0]['archive_firstmatch']
             if not self.archive_firstmatch:
                 self.archive_firstmatch = 0
 
-            self.quality = int(sqlResults[0]["quality"])
-            self.flatten_folders = int(sqlResults[0]["flatten_folders"])
-            self.paused = int(sqlResults[0]["paused"])
+            self.quality = int(sqlResults[0]['quality'])
+            self.flatten_folders = int(sqlResults[0]['flatten_folders'])
+            self.paused = int(sqlResults[0]['paused'])
 
             try:
-                self.location = sqlResults[0]["location"]
+                self.location = sqlResults[0]['location']
             except Exception:
-                dirty_setter("_location")(self, sqlResults[0]["location"])
+                dirty_setter('_location')(self, sqlResults[0]['location'])
                 self._isDirGood = False
 
             if not self.lang:
-                self.lang = sqlResults[0]["lang"]
+                self.lang = sqlResults[0]['lang']
 
-            self.last_update_indexer = sqlResults[0]["last_update_indexer"]
+            self.last_update_indexer = sqlResults[0]['last_update_indexer']
 
-            self.rls_ignore_words = sqlResults[0]["rls_ignore_words"]
-            self.rls_require_words = sqlResults[0]["rls_require_words"]
+            self.rls_ignore_words = sqlResults[0]['rls_ignore_words']
+            self.rls_require_words = sqlResults[0]['rls_require_words']
 
             if not self.imdbid:
-                self.imdbid = sqlResults[0]["imdb_id"]
+                self.imdbid = sqlResults[0]['imdb_id']
 
-            if self.is_anime:
-                self.release_groups = BlackAndWhiteList(self.indexerid)
+        if self.is_anime:
+            self.release_groups = BlackAndWhiteList(self.indexerid)
+
+        logger.log(str(self.indexerid) + u': Show info [%s] loaded from database' % self.name)
 
         # Get IMDb_info from database
         myDB = db.DBConnection()
         sqlResults = myDB.select("SELECT * FROM imdb_info WHERE indexer_id = ?", [self.indexerid])
 
-        if len(sqlResults) == 0:
-            logger.log(str(self.indexerid) + ": Unable to find IMDb show info in the database")
+        if 0 == len(sqlResults):
+            logger.log(str(self.indexerid) + ': Unable to find IMDb show info in the database for [%s]' % self.name)
             return
-        else:
-            self.imdb_info = dict(zip(sqlResults[0].keys(), sqlResults[0]))
+
+        self.imdb_info = dict(zip(sqlResults[0].keys(), sqlResults[0]))
 
         self.dirty = False
         return True
 
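The imdb_info handling above relies on the db layer returning rows that expose .keys(), so dict(zip(row.keys(), row)) turns the first result row into a plain dict. With the standard sqlite3.Row factory the same pattern looks like this (the in-memory table and columns here are invented for the demo, not SickGear's real schema):

    import sqlite3

    conn = sqlite3.connect(':memory:')
    conn.row_factory = sqlite3.Row
    conn.execute('CREATE TABLE imdb_info (indexer_id INTEGER, imdb_id TEXT, rating TEXT)')
    conn.execute("INSERT INTO imdb_info VALUES (1, 'tt0000001', '8.1')")

    row = conn.execute('SELECT * FROM imdb_info WHERE indexer_id = ?', [1]).fetchone()
    # sqlite3.Row supports .keys() and iterates over column values, so zip() pairs names with values
    imdb_info = dict(zip(row.keys(), row))
    print(imdb_info)
    # -> {'indexer_id': 1, 'imdb_id': 'tt0000001', 'rating': '8.1'}
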