Merge pull request #359 from ressu/feature/FixUndoQuery

Fix previously processed file detection
JackDandy 2015-05-16 16:16:11 +01:00
commit da99eeaedf
4 changed files with 104 additions and 65 deletions

View file

@@ -59,6 +59,7 @@
 * Change disable the Force buttons on the Manage Searches page while a search is running
 * Change staggered periods of testing and updating of all shows "ended" status up to 460 days
 * Change "Archive" to "Upgrade to" in Edit show and other places and improve related texts for clarity
+* Fix history consolidation to only update an episode status if the history disagrees with the status
 [develop changelog]
 * Fix issue where adding an existing show did not set its default group, so that the show now appears on the show list page

View file

@@ -94,3 +94,33 @@ def logFailed(epObj, release, provider=None):
     action = Quality.compositeStatus(FAILED, quality)
 
     _logHistoryItem(action, showid, season, epNum, quality, release, provider)
+
+
+def reset_status(indexerid, season, episode):
+    ''' Revert episode history to status from download history,
+    if history exists '''
+
+    my_db = db.DBConnection()
+    history_sql = 'SELECT h.action, h.showid, h.season, h.episode,'\
+                  ' t.status FROM history AS h INNER JOIN tv_episodes AS t'\
+                  ' ON h.showid = t.showid AND h.season = t.season'\
+                  ' AND h.episode = t.episode WHERE t.showid = ? AND t.season = ?'\
+                  ' AND t.episode = ? GROUP BY h.action ORDER BY h.date DESC limit 1'
+
+    sql_history = my_db.select(history_sql, [str(indexerid),
+                                             str(season),
+                                             str(episode)])
+    if len(sql_history) == 1:
+        history = sql_history[0]
+
+        # update status only if status differs
+        # FIXME: this causes issues if the user changed status manually
+        # replicating refactored behavior anyway.
+        if history['status'] != history['action']:
+            undo_status = 'UPDATE tv_episodes SET status = ?'\
+                          ' WHERE showid = ? AND season = ? AND episode = ?'
+
+            my_db.action(undo_status, [history['action'],
+                                       history['showid'],
+                                       history['season'],
+                                       history['episode']])
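To make the new helper concrete, here is a self-contained sketch of the same join and conditional update using plain sqlite3 instead of SickGear's db.DBConnection wrapper. The table definitions are trimmed to only the columns the query touches, and the inserted values are arbitrary samples, not the real schema or data:

import sqlite3

conn = sqlite3.connect(':memory:')
conn.row_factory = sqlite3.Row
conn.executescript('''
    CREATE TABLE history (action INTEGER, date NUMERIC, showid INTEGER,
                          season INTEGER, episode INTEGER);
    CREATE TABLE tv_episodes (showid INTEGER, season INTEGER,
                              episode INTEGER, status INTEGER);
    INSERT INTO history VALUES (404, 20150516, 73762, 5, 8);   -- sample: latest history action
    INSERT INTO tv_episodes VALUES (73762, 5, 8, 3);           -- sample: current, disagreeing status
''')

# The same query shape reset_status() uses: the newest matching history row wins.
row = conn.execute(
    'SELECT h.action, h.showid, h.season, h.episode, t.status'
    ' FROM history AS h INNER JOIN tv_episodes AS t'
    ' ON h.showid = t.showid AND h.season = t.season AND h.episode = t.episode'
    ' WHERE t.showid = ? AND t.season = ? AND t.episode = ?'
    ' GROUP BY h.action ORDER BY h.date DESC LIMIT 1',
    ('73762', '5', '8')).fetchone()

if row and row['status'] != row['action']:
    # Only touch the episode when its stored status disagrees with history.
    conn.execute('UPDATE tv_episodes SET status = ?'
                 ' WHERE showid = ? AND season = ? AND episode = ?',
                 (row['action'], row['showid'], row['season'], row['episode']))
    conn.commit()

print(conn.execute('SELECT status FROM tv_episodes').fetchone()['status'])  # -> 404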

View file

@@ -31,6 +31,7 @@ from sickbeard.exceptions import ex
 from sickbeard import logger
 from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException
 from sickbeard import common
+from sickbeard.history import reset_status
 from sickbeard import failedProcessor
@@ -420,34 +421,34 @@ class ProcessTVShow(object):
         parse_result = None
         try:
-            parse_result = NameParser(try_indexers=True, try_scene_exceptions=True, convert=True).parse(videofile, cache_result=False)
+            parse_result = NameParser(try_indexers=True,
+                                      try_scene_exceptions=True,
+                                      convert=True).parse(videofile,
+                                                          cache_result=False)
         except (InvalidNameException, InvalidShowException):
             # Does not parse, move on to directory check
             pass
         if None is parse_result:
             try:
-                parse_result = NameParser(try_indexers=True, try_scene_exceptions=True, convert=True).parse(dir_name, cache_result=False)
+                parse_result = NameParser(try_indexers=True,
+                                          try_scene_exceptions=True,
+                                          convert=True).parse(
+                                              dir_name, cache_result=False)
             except (InvalidNameException, InvalidShowException):
-                pass
+                # If the filename doesn't parse, then return false as last
+                # resort. We can assume that unparseable filenames are not
+                # processed in the past
+                return False
 
+        showlink = (' for "<a href="/home/displayShow?show=%s" target="_blank">%s</a>"' % (parse_result.show.indexerid, parse_result.show.name),
+                    parse_result.show.name)[self.any_vid_processed]
-        showlink = ''
         ep_detail_sql = ''
-        undo_status = None
-        if parse_result:
-            showlink = (' for "<a href="/home/displayShow?show=%s" target="_blank">%s</a>"' % (parse_result.show.indexerid, parse_result.show.name),
-                        parse_result.show.name)[self.any_vid_processed]
-            if parse_result.show.indexerid and parse_result.episode_numbers and parse_result.season_number:
-                ep_detail_sql = " and tv_episodes.showid='%s' and tv_episodes.season='%s' and tv_episodes.episode='%s'"\
-                                % (str(parse_result.show.indexerid),
-                                   str(parse_result.season_number),
-                                   str(parse_result.episode_numbers[0]))
-                undo_status = "UPDATE `tv_episodes` SET status="\
-                    + "(SELECT h.action FROM `history` as h INNER JOIN `tv_episodes` as t on h.showid=t.showid"\
-                    + " where t.showid='%s' and t.season='%s' and t.episode='%s'"\
-                    % (str(parse_result.show.indexerid), str(parse_result.season_number), str(parse_result.episode_numbers[0]))\
-                    + " and (h.action is not t.status) group by h.action order by h.date DESC LIMIT 1)"\
-                    + " where showid='%s' and season='%s' and episode='%s'"\
-                    % (str(parse_result.show.indexerid), str(parse_result.season_number), str(parse_result.episode_numbers[0]))
+        if parse_result.show.indexerid and parse_result.episode_numbers and parse_result.season_number:
+            ep_detail_sql = " and tv_episodes.showid='%s' and tv_episodes.season='%s' and tv_episodes.episode='%s'"\
+                            % (str(parse_result.show.indexerid),
+                               str(parse_result.season_number),
+                               str(parse_result.episode_numbers[0]))
 
         # Avoid processing the same directory again if we use a process method <> move
         my_db = db.DBConnection()
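A side note on the showlink expression retained above: (a, b)[flag] is a pre-ternary Python idiom; because False and True index a tuple as 0 and 1, it picks the first element when self.any_vid_processed is falsy and the second when it is truthy. A tiny standalone illustration, with made-up values standing in for the parse result and the flag:

any_vid_processed = False                   # made-up stand-in for self.any_vid_processed
indexerid, name = 73762, 'Example Show'     # made-up stand-ins for parse_result.show

showlink = (' for "<a href="/home/displayShow?show=%s" target="_blank">%s</a>"' % (indexerid, name),
            name)[any_vid_processed]

print(showlink)  # flag is False (index 0), so the full HTML link variant is selected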
@@ -455,7 +456,9 @@ class ProcessTVShow(object):
         if sql_result:
             self._log_helper(u'Found a release directory%s that has already been processed,<br />.. skipping: %s'
                              % (showlink, dir_name))
-            my_db.action(undo_status)
+            reset_status(parse_result.show.indexerid,
+                         parse_result.season_number,
+                         parse_result.episode_numbers[0])
 
             return True
         else:
@@ -467,7 +470,9 @@ class ProcessTVShow(object):
         if sql_result:
             self._log_helper(u'Found a video, but that release%s was already processed,<br />.. skipping: %s'
                              % (showlink, videofile))
-            my_db.action(undo_status)
+            reset_status(parse_result.show.indexerid,
+                         parse_result.season_number,
+                         parse_result.episode_numbers[0])
 
             return True
 
         # Needed if we have downloaded the same episode @ different quality
@@ -482,7 +487,9 @@ class ProcessTVShow(object):
         if sql_result:
             self._log_helper(u'Found a video, but the episode%s is already processed,<br />.. skipping: %s'
                              % (showlink, videofile))
-            my_db.action(undo_status)
+            reset_status(parse_result.show.indexerid,
+                         parse_result.season_number,
+                         parse_result.episode_numbers[0])
 
             return True
 
         return False
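Taken together, the three branches above follow one pattern: parse the release name, and only when a prior processing record is found put the episode status back to what the download history recorded before skipping the file. A condensed, illustrative version of that flow follows; the function name and injected helpers are hypothetical stand-ins, not SickGear's actual API:

# Hypothetical, condensed sketch of the detection flow shown in the hunks above.
def already_processed(videofile, dir_name, parse_name, find_processed_record, reset_status):
    """Return True when this release was handled before, reverting its status first."""
    parse_result = parse_name(videofile) or parse_name(dir_name)
    if parse_result is None:
        # Unparseable names are assumed never to have been processed: handle them.
        return False

    if find_processed_record(parse_result):      # stands in for the history/db lookups
        # A previous run already handled this release, so restore the episode
        # status recorded in download history before telling the caller to skip.
        reset_status(parse_result.show_id, parse_result.season, parse_result.episode)
        return True
    return False


# Smoke test with trivial stand-ins:
class _Parsed(object):
    show_id, season, episode = 73762, 5, 8       # made-up sample identifiers

print(already_processed('show.s05e08.mkv', '/tv/incoming',
                        parse_name=lambda name: _Parsed(),
                        find_processed_record=lambda p: True,
                        reset_status=lambda sid, s, e: print('reset', sid, s, e)))
# -> prints "reset 73762 5 8", then True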

View file

@@ -1556,63 +1556,64 @@ class TVEpisode(object):
     def loadFromDB(self, season, episode):
         logger.log(
-            str(self.show.indexerid) + u": Loading episode details from DB for episode " + str(season) + "x" + str(
+            str(self.show.indexerid) + u': Loading episode details from DB for episode ' + str(season) + 'x' + str(
                 episode), logger.DEBUG)
 
         myDB = db.DBConnection()
-        sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?",
+        sql_results = myDB.select('SELECT * FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?',
                                  [self.show.indexerid, season, episode])
 
-        if len(sqlResults) > 1:
-            raise exceptions.MultipleDBEpisodesException("Your DB has two records for the same show somehow.")
-        elif len(sqlResults) == 0:
-            logger.log(str(self.show.indexerid) + u": Episode " + str(self.season) + "x" + str(
-                self.episode) + " not found in the database", logger.DEBUG)
+        if len(sql_results) > 1:
+            raise exceptions.MultipleDBEpisodesException('Your DB has two records for the same show somehow.')
+        elif len(sql_results) == 0:
+            logger.log(str(self.show.indexerid) + u': Episode ' + str(self.season) + 'x' + str(
+                self.episode) + ' not found in the database', logger.DEBUG)
             return False
         else:
-            # NAMEIT logger.log(u"AAAAA from" + str(self.season)+"x"+str(self.episode) + " -" + self.name + " to " + str(sqlResults[0]["name"]))
-            if sqlResults[0]["name"]:
-                self.name = sqlResults[0]["name"]
+            # NAMEIT logger.log(u'AAAAA from' + str(self.season)+'x'+str(self.episode) + ' -' + self.name + ' to ' + str(sql_results[0]['name']))
+            if sql_results[0]['name']:
+                self.name = sql_results[0]['name']
 
             self.season = season
             self.episode = episode
-            self.absolute_number = sqlResults[0]["absolute_number"]
-            self.description = sqlResults[0]["description"]
+            self.absolute_number = sql_results[0]['absolute_number']
+            self.description = sql_results[0]['description']
             if not self.description:
-                self.description = ""
-            if sqlResults[0]["subtitles"] and sqlResults[0]["subtitles"]:
-                self.subtitles = sqlResults[0]["subtitles"].split(",")
-            self.subtitles_searchcount = sqlResults[0]["subtitles_searchcount"]
-            self.subtitles_lastsearch = sqlResults[0]["subtitles_lastsearch"]
-            self.airdate = datetime.date.fromordinal(int(sqlResults[0]["airdate"]))
-            # logger.log(u"1 Status changes from " + str(self.status) + " to " + str(sqlResults[0]["status"]), logger.DEBUG)
-            self.status = int(sqlResults[0]["status"])
+                self.description = ''
+            if sql_results[0]['subtitles'] and sql_results[0]['subtitles']:
+                self.subtitles = sql_results[0]['subtitles'].split(',')
+            self.subtitles_searchcount = sql_results[0]['subtitles_searchcount']
+            self.subtitles_lastsearch = sql_results[0]['subtitles_lastsearch']
+            self.airdate = datetime.date.fromordinal(int(sql_results[0]['airdate']))
+            # logger.log(u'1 Status changes from ' + str(self.status) + ' to ' + str(sql_results[0]['status']), logger.DEBUG)
+            if sql_results[0]['status'] is not None:
+                self.status = int(sql_results[0]['status'])
 
             # don't overwrite my location
-            if sqlResults[0]["location"] and sqlResults[0]["location"]:
-                self.location = os.path.normpath(sqlResults[0]["location"])
-            if sqlResults[0]["file_size"]:
-                self.file_size = int(sqlResults[0]["file_size"])
+            if sql_results[0]['location'] and sql_results[0]['location']:
+                self.location = os.path.normpath(sql_results[0]['location'])
+            if sql_results[0]['file_size']:
+                self.file_size = int(sql_results[0]['file_size'])
             else:
                 self.file_size = 0
 
-            self.indexerid = int(sqlResults[0]["indexerid"])
-            self.indexer = int(sqlResults[0]["indexer"])
+            self.indexerid = int(sql_results[0]['indexerid'])
+            self.indexer = int(sql_results[0]['indexer'])
 
             sickbeard.scene_numbering.xem_refresh(self.show.indexerid, self.show.indexer)
 
             try:
-                self.scene_season = int(sqlResults[0]["scene_season"])
+                self.scene_season = int(sql_results[0]['scene_season'])
             except:
                 self.scene_season = 0
 
             try:
-                self.scene_episode = int(sqlResults[0]["scene_episode"])
+                self.scene_episode = int(sql_results[0]['scene_episode'])
             except:
                 self.scene_episode = 0
 
             try:
-                self.scene_absolute_number = int(sqlResults[0]["scene_absolute_number"])
+                self.scene_absolute_number = int(sql_results[0]['scene_absolute_number'])
             except:
                 self.scene_absolute_number = 0
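One behavioural change is buried in the otherwise mechanical rename above: the new "is not None" guard before coercing status. When the column is NULL, the old int(sqlResults[0]["status"]) raises TypeError; the new code keeps the episode's existing in-memory status instead. A minimal standalone illustration with plain sqlite3 (the one-column table is a made-up stand-in, not the real schema):

import sqlite3

conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE tv_episodes (status INTEGER)')
conn.execute('INSERT INTO tv_episodes (status) VALUES (NULL)')  # row with an unset status

row = conn.execute('SELECT status FROM tv_episodes').fetchone()

status = 5                     # pretend this is the value already held on the object
if row[0] is not None:         # the guard added in the new code path
    status = int(row[0])       # int(None) would raise TypeError without the guard

print(status)                  # -> 5: a NULL column leaves the previous value alone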
@@ -1630,17 +1631,17 @@
                 self.season, self.episode
             )
 
-            if sqlResults[0]["release_name"] is not None:
-                self.release_name = sqlResults[0]["release_name"]
+            if sql_results[0]['release_name'] is not None:
+                self.release_name = sql_results[0]['release_name']
 
-            if sqlResults[0]["is_proper"]:
-                self.is_proper = int(sqlResults[0]["is_proper"])
+            if sql_results[0]['is_proper']:
+                self.is_proper = int(sql_results[0]['is_proper'])
 
-            if sqlResults[0]["version"]:
-                self.version = int(sqlResults[0]["version"])
+            if sql_results[0]['version']:
+                self.version = int(sql_results[0]['version'])
 
-            if sqlResults[0]["release_group"] is not None:
-                self.release_group = sqlResults[0]["release_group"]
+            if sql_results[0]['release_group'] is not None:
+                self.release_group = sql_results[0]['release_group']
 
             self.dirty = False
             return True
@@ -2020,7 +2021,7 @@
     def saveToDB(self, forceSave=False):
         """
         Saves this episode to the database if any of its data has been changed since the last save.
 
         forceSave: If True it will save to the database even if no data has been changed since the
         last save (aka if the record is not dirty).
         """
@@ -2091,7 +2092,7 @@
         Returns the name of this episode in a "pretty" human-readable format. Used for logging
         and notifications and such.
 
-        Returns: A string representing the episode's name and season/ep numbers 
+        Returns: A string representing the episode's name and season/ep numbers
         """
 
         if self.show.anime and not self.show.scene:
@@ -2146,7 +2147,7 @@
         """
         Generates a replacement map for this episode which maps all possible custom naming patterns to the correct
         value for this episode.
 
         Returns: A dict with patterns as the keys and their replacement values as the values.
         """
@@ -2391,7 +2392,7 @@
         return result_name
 
     def proper_path(self):
-        """ 
+        """
        Figures out the path where this episode SHOULD live according to the renaming rules, relative from the show dir
        """