Merge pull request #786 from JackDandy/feature/FixAiredInSeason

Change allow pp to replace files with a repack or proper of same quality
JackDandy authored 2016-10-01 11:31:20 +01:00, committed by GitHub
commit 5c3ec65813
9 changed files with 70 additions and 41 deletions

CHANGES.md

@@ -170,6 +170,10 @@
   source and there is no media file
 * Change only show unaired episodes on Manage/Backlog Overview and Manage/Episode Status Management where relevant
 * Change locally cache "Add from Trakt" show posters, first run takes more time but is faster thereafter
+* Change allow pp to replace files with a repack or proper of same quality
+* Fix ensure downloaded eps are not shown on episode view
+* Fix allow propers to pp when show marked upgrade once
+* Fix never set episodes without airdate to wanted
 
 [develop changelog]
 * Change send nzb data to NZBGet for Anizb instead of url

sickbeard/databases/mainDB.py

@@ -45,19 +45,23 @@ class MainSanityCheck(db.DBSanityCheck):
         for cur_duplicate in sql_results:
-            logger.log(u'Duplicate show detected! ' + column + ': ' + str(cur_duplicate[column]) + u' count: ' + str(
-                cur_duplicate['count']), logger.DEBUG)
+            logger.log(u'Duplicate show detected! %s: %s count: %s' % (column, cur_duplicate[column],
+                                                                       cur_duplicate['count']), logger.DEBUG)
 
             cur_dupe_results = self.connection.select(
                 'SELECT show_id, ' + column + ' FROM tv_shows WHERE ' + column + ' = ? LIMIT ?',
                 [cur_duplicate[column], int(cur_duplicate['count']) - 1]
             )
 
+            cl = []
             for cur_dupe_id in cur_dupe_results:
                 logger.log(
-                    u'Deleting duplicate show with ' + column + ': ' + str(cur_dupe_id[column]) + u' show_id: ' + str(
-                        cur_dupe_id['show_id']))
-                self.connection.action('DELETE FROM tv_shows WHERE show_id = ?', [cur_dupe_id['show_id']])
+                    u'Deleting duplicate show with %s: %s show_id: %s' % (column, cur_dupe_id[column],
+                                                                          cur_dupe_id['show_id']))
+                cl.append(['DELETE FROM tv_shows WHERE show_id = ?', [cur_dupe_id['show_id']]])
+            if 0 < len(cl):
+                self.connection.mass_action(cl)
         else:
             logger.log(u'No duplicate show, check passed')
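
Note on the pattern above (repeated throughout this file): per-row self.connection.action(...) calls are replaced by collecting [query, args] pairs in cl and committing them in one go with mass_action, so all the deletes land in a single transaction. A minimal standalone sketch of the idea, using plain sqlite3 in place of SickGear's db wrapper (mass_action here is illustrative, not the real method):

    # Minimal sketch of the batching pattern, assuming plain sqlite3.
    import sqlite3

    def mass_action(connection, query_list):
        # run every [query, args] pair inside one transaction, committing
        # once at the end instead of once per statement
        with connection:
            for query, args in query_list:
                connection.execute(query, args)

    connection = sqlite3.connect(':memory:')
    connection.execute('CREATE TABLE tv_shows (show_id INTEGER, indexer_id INTEGER)')
    connection.executemany('INSERT INTO tv_shows VALUES (?, ?)', [(1, 10), (2, 10), (3, 11)])

    cl = []
    for dupe_id in (1, 2):
        cl.append(['DELETE FROM tv_shows WHERE show_id = ?', [dupe_id]])
    if 0 < len(cl):
        mass_action(connection, cl)
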
@@ -69,10 +73,9 @@ class MainSanityCheck(db.DBSanityCheck):
         for cur_duplicate in sql_results:
-            logger.log(u'Duplicate episode detected! showid: ' + str(cur_duplicate['showid']) + u' season: '
-                       + str(cur_duplicate['season']) + u' episode: ' + str(cur_duplicate['episode']) + u' count: '
-                       + str(cur_duplicate['count']),
-                       logger.DEBUG)
+            logger.log(u'Duplicate episode detected! showid: %s season: %s episode: %s count: %s' %
+                       (cur_duplicate['showid'], cur_duplicate['season'], cur_duplicate['episode'],
+                        cur_duplicate['count']), logger.DEBUG)
 
             cur_dupe_results = self.connection.select(
                 'SELECT episode_id FROM tv_episodes WHERE showid = ? AND season = ? and episode = ? ORDER BY episode_id DESC LIMIT ?',
@@ -80,9 +83,13 @@ class MainSanityCheck(db.DBSanityCheck):
                 int(cur_duplicate['count']) - 1]
             )
 
+            cl = []
             for cur_dupe_id in cur_dupe_results:
-                logger.log(u'Deleting duplicate episode with episode_id: ' + str(cur_dupe_id['episode_id']))
-                self.connection.action('DELETE FROM tv_episodes WHERE episode_id = ?', [cur_dupe_id['episode_id']])
+                logger.log(u'Deleting duplicate episode with episode_id: %s' % cur_dupe_id['episode_id'])
+                cl.append(['DELETE FROM tv_episodes WHERE episode_id = ?', [cur_dupe_id['episode_id']]])
+            if 0 < len(cl):
+                self.connection.mass_action(cl)
         else:
             logger.log(u'No duplicate episode, check passed')
@@ -92,11 +99,15 @@ class MainSanityCheck(db.DBSanityCheck):
         sql_results = self.connection.select(
             'SELECT episode_id, showid, tv_shows.indexer_id FROM tv_episodes LEFT JOIN tv_shows ON tv_episodes.showid=tv_shows.indexer_id WHERE tv_shows.indexer_id is NULL')
 
+        cl = []
         for cur_orphan in sql_results:
-            logger.log(u'Orphan episode detected! episode_id: ' + str(cur_orphan['episode_id']) + ' showid: ' + str(
-                cur_orphan['showid']), logger.DEBUG)
-            logger.log(u'Deleting orphan episode with episode_id: ' + str(cur_orphan['episode_id']))
-            self.connection.action('DELETE FROM tv_episodes WHERE episode_id = ?', [cur_orphan['episode_id']])
+            logger.log(u'Orphan episode detected! episode_id: %s showid: %s' % (cur_orphan['episode_id'],
+                                                                                cur_orphan['showid']), logger.DEBUG)
+            logger.log(u'Deleting orphan episode with episode_id: %s' % cur_orphan['episode_id'])
+            cl.append(['DELETE FROM tv_episodes WHERE episode_id = ?', [cur_orphan['episode_id']]])
+        if 0 < len(cl):
+            self.connection.mass_action(cl)
         else:
             logger.log(u'No orphan episodes, check passed')
@@ -128,18 +139,22 @@ class MainSanityCheck(db.DBSanityCheck):
     def fix_unaired_episodes(self):
 
-        cur_date = datetime.date.today()
+        cur_date = datetime.date.today() + datetime.timedelta(days=1)
 
         sql_results = self.connection.select(
-            'SELECT episode_id, showid FROM tv_episodes WHERE status = ? or airdate > ? AND status in (?,?)', ['',
-            cur_date.toordinal(), common.SKIPPED, common.WANTED])
+            'SELECT episode_id, showid FROM tv_episodes WHERE status = ? or ( airdate > ? AND status in (?,?) ) or '
+            '( airdate <= 1 AND status = ? )', ['', cur_date.toordinal(), common.SKIPPED, common.WANTED, common.WANTED])
 
+        cl = []
         for cur_unaired in sql_results:
-            logger.log(u'UNAIRED episode detected! episode_id: ' + str(cur_unaired['episode_id']) + ' showid: ' + str(
-                cur_unaired['showid']), logger.DEBUG)
-            logger.log(u'Fixing unaired episode status with episode_id: ' + str(cur_unaired['episode_id']))
-            self.connection.action('UPDATE tv_episodes SET status = ? WHERE episode_id = ?',
-                                   [common.UNAIRED, cur_unaired['episode_id']])
+            logger.log(u'UNAIRED episode detected! episode_id: %s showid: %s' % (cur_unaired['episode_id'],
+                                                                                 cur_unaired['showid']), logger.DEBUG)
+            logger.log(u'Fixing unaired episode status with episode_id: %s' % cur_unaired['episode_id'])
+            cl.append(['UPDATE tv_episodes SET status = ? WHERE episode_id = ?',
+                       [common.UNAIRED, cur_unaired['episode_id']]])
+        if 0 < len(cl):
+            self.connection.mass_action(cl)
         else:
             logger.log(u'No UNAIRED episodes, check passed')
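
Note: this check is widened in two ways. cur_date moves one day ahead, so only episodes airing after tomorrow count as unaired, and WANTED episodes with no known air date are now reset too, matching "Fix never set episodes without airdate to wanted" in the changelog. A rough Python restatement of the new WHERE clause, assuming SickGear's convention that an unknown air date is stored as ordinal 1 (the airdate <= 1 test above); the status codes are stand-ins for the real common.* values:

    import datetime

    SKIPPED, WANTED = 5, 3  # stand-in status codes, not the real common.* values

    def needs_unaired_fix(status, airdate_ordinal):
        cur_date = datetime.date.today() + datetime.timedelta(days=1)
        return (status == '' or
                (airdate_ordinal > cur_date.toordinal() and status in (SKIPPED, WANTED)) or
                (airdate_ordinal <= 1 and status == WANTED))

    print(needs_unaired_fix(WANTED, 1))  # True: no air date, so reset to UNAIRED
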

sickbeard/failedProcessor.py

@@ -28,13 +28,14 @@ from sickbeard.name_parser.parser import NameParser, InvalidNameException, Inval
 class FailedProcessor(object):
     """Take appropriate action when a download fails to complete"""
 
-    def __init__(self, dirName, nzbName):
+    def __init__(self, dirName, nzbName, showObj=None):
         """
         dirName: Full path to the folder of the failed download
         nzbName: Full name of the nzb file that failed
         """
         self.dir_name = dirName
         self.nzb_name = nzbName
+        self.show = showObj
 
         self.log = ""
@@ -47,7 +48,7 @@ class FailedProcessor(object):
             raise exceptions.FailedProcessingFailed()
 
         try:
-            parser = NameParser(False, convert=True)
+            parser = NameParser(False, showObj=self.show, convert=True)
             parsed = parser.parse(releaseName)
         except InvalidNameException:
             self._log(u"Error: release name is invalid: " + releaseName, logger.DEBUG)

sickbeard/postProcessor.py

@@ -748,7 +748,7 @@ class PostProcessor(object):
             self._log(u'Existing episode status is not downloaded/archived, marking it safe to replace', logger.DEBUG)
             return True
 
-        if common.ARCHIVED == old_ep_status:
+        if common.ARCHIVED == old_ep_status and common.Quality.NONE == old_ep_quality:
             self._log(u'Marking it unsafe to replace because the existing episode status is archived', logger.DEBUG)
             return False
@@ -771,6 +771,10 @@ class PostProcessor(object):
 
         # if there's an existing downloaded file with same quality, check filesize to decide
         if new_ep_quality == old_ep_quality:
+            if re.search(r'\bproper|repack\b', self.nzb_name, re.I) or re.search(r'\bproper|repack\b', self.file_name, re.I):
+                self._log(u'Proper or repack with same quality, marking it safe to replace', logger.DEBUG)
+                return True
+
             self._log(u'An episode exists in the database with the same quality as the episode to process', logger.DEBUG)
             existing_file_status = self._check_for_existing_file(ep_obj.location)
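
Note: the alternation in that pattern binds loosely, so r'\bproper|repack\b' is really (\bproper)|(repack\b) rather than \b(proper|repack)\b; for these two keywords in release names the effect is the same in practice. A quick self-contained check:

    import re

    def is_proper_or_repack(name):
        return bool(re.search(r'\bproper|repack\b', name, re.I))

    for name in ('Show.S01E01.PROPER.720p', 'Show.S01E01.REPACK.720p', 'Show.S01E01.720p'):
        print(name, is_proper_or_repack(name))  # True, True, False
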
@@ -911,10 +915,13 @@ class PostProcessor(object):
                 cur_ep.release_name = self.release_name or ''
 
+                any_qualities, best_qualities = common.Quality.splitQuality(cur_ep.show.quality)
                 cur_ep.status = common.Quality.compositeStatus(
                     **({'status': common.DOWNLOADED, 'quality': new_ep_quality},
                        {'status': common.ARCHIVED, 'quality': new_ep_quality})
-                    [ep_obj.status in common.Quality.SNATCHED_BEST])
+                    [ep_obj.status in common.Quality.SNATCHED_BEST or
+                     (cur_ep.show.archive_firstmatch and new_ep_quality in best_qualities)])
 
                 cur_ep.release_group = self.release_group or ''
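
Note: the construct above picks one of two kwargs dicts by indexing a 2-tuple with a boolean (False is 0, True is 1) and unpacks the winner with **. The change extends the True branch, so a snatched-best grab, or a first match at one of the show's best qualities when the show is set to archive first matches, is stored as ARCHIVED rather than DOWNLOADED. A minimal sketch of the idiom with stand-in values:

    DOWNLOADED, ARCHIVED = 'downloaded', 'archived'  # stand-ins for composite statuses

    def composite_status(status, quality):
        return status, quality

    snatched_best = False
    archive_firstmatch, is_best_quality = True, True

    # bool indexes the 2-tuple: False -> 0 (DOWNLOADED), True -> 1 (ARCHIVED)
    status = composite_status(
        **({'status': DOWNLOADED, 'quality': 'hd1080p'},
           {'status': ARCHIVED, 'quality': 'hd1080p'})
        [snatched_best or (archive_firstmatch and is_best_quality)])
    print(status)  # ('archived', 'hd1080p')
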

sickbeard/processTV.py

@@ -188,7 +188,7 @@ class ProcessTVShow(object):
         # if we didn't find a real directory then process "failed" or just quit
         if not dir_name or not ek.ek(os.path.isdir, dir_name):
             if nzb_name and failed:
-                self._process_failed(dir_name, nzb_name)
+                self._process_failed(dir_name, nzb_name, showObj=showObj)
             else:
                 self._log_helper(u'Unable to figure out what folder to process. ' +
                                  u'If your downloader and SickGear aren\'t on the same PC then make sure ' +
@@ -215,7 +215,7 @@ class ProcessTVShow(object):
                 path, filter(helpers.is_first_rar_volume, files), pp_type, process_method)
             rar_content = self._unrar(path, rar_files, force)
             if self.fail_detected:
-                self._process_failed(dir_name, nzb_name)
+                self._process_failed(dir_name, nzb_name, showObj=showObj)
                 return self.result
         path, dirs, files = self._get_path_dir_files(dir_name, nzb_name, pp_type)
         video_files = filter(helpers.has_media_ext, files)
@@ -280,7 +280,7 @@ class ProcessTVShow(object):
                 rar_content = self._unrar(walk_path, rar_files, force)
                 work_files += [ek.ek(os.path.join, walk_path, item) for item in rar_content]
                 if self.fail_detected:
-                    self._process_failed(dir_name, nzb_name)
+                    self._process_failed(dir_name, nzb_name, showObj=showObj)
                     continue
                 files = list(set(files + rar_content))
                 video_files = filter(helpers.has_media_ext, files)
@@ -401,7 +401,7 @@ class ProcessTVShow(object):
             return False
 
         if failed:
-            self._process_failed(os.path.join(path, dir_name), nzb_name_original)
+            self._process_failed(os.path.join(path, dir_name), nzb_name_original, showObj=showObj)
             return False
 
         if helpers.is_hidden_folder(dir_name):
@@ -820,14 +820,14 @@ class ProcessTVShow(object):
         return path, dirs, files
 
     # noinspection PyArgumentList
-    def _process_failed(self, dir_name, nzb_name):
+    def _process_failed(self, dir_name, nzb_name, showObj=None):
         """ Process a download that did not complete correctly """
 
         if sickbeard.USE_FAILED_DOWNLOADS:
             processor = None
 
             try:
-                processor = failedProcessor.FailedProcessor(dir_name, nzb_name)
+                processor = failedProcessor.FailedProcessor(dir_name, nzb_name, showObj)
                 self._set_process_success(processor.process())
                 process_fail_message = ''
             except exceptions.FailedProcessingFailed as e:
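
Note: the five hunks above all thread the same optional showObj from the processing entry points down to FailedProcessor, and from there to NameParser, so a caller that already knows the show can pin the parse to it; the None default keeps every existing call site working. A hedged sketch of the pattern with illustrative names (not the real SickGear classes):

    class FailedDownload(object):
        def __init__(self, dir_name, nzb_name, show_obj=None):
            self.dir_name, self.nzb_name, self.show = dir_name, nzb_name, show_obj

    def process_failed(dir_name, nzb_name, show_obj=None):
        processor = FailedDownload(dir_name, nzb_name, show_obj)
        # a parser handed processor.show can skip the fuzzy show-name lookup
        return processor.show

    print(process_failed('/failed', 'My.Show.S01E01.nzb'))             # None
    print(process_failed('/failed', 'My.Show.S01E01.nzb', 'My Show'))  # My Show
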

sickbeard/properFinder.py

@@ -30,7 +30,7 @@ from sickbeard import helpers, logger, show_name_helpers
 from sickbeard import search
 from sickbeard import history
-from sickbeard.common import DOWNLOADED, SNATCHED, SNATCHED_PROPER, Quality
+from sickbeard.common import DOWNLOADED, SNATCHED, SNATCHED_PROPER, Quality, ARCHIVED, SNATCHED_BEST
 from name_parser.parser import NameParser, InvalidNameException, InvalidShowException
@@ -173,7 +173,8 @@ def _get_proper_list(aired_since_shows, recent_shows, recent_anime):
         # only keep the proper if we have already retrieved the same quality ep (don't get better/worse ones)
         old_status, old_quality = Quality.splitCompositeStatus(int(sql_results[0]['status']))
-        if old_status not in (DOWNLOADED, SNATCHED) or cur_proper.quality != old_quality:
+        if old_status not in (DOWNLOADED, SNATCHED, SNATCHED_BEST, ARCHIVED) \
+                or cur_proper.quality != old_quality:
             continue
 
         # check if we actually want this proper (if it's the right release group and a higher version)
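
Note: the gate now also accepts episodes whose history ended in SNATCHED_BEST or ARCHIVED, so a show marked to upgrade once still receives propers of the quality it already grabbed ("Fix allow propers to pp when show marked upgrade once"); the exact quality match still stops a proper from up- or down-grading. A small sketch with stand-in status values (the real ones live in sickbeard.common):

    DOWNLOADED, SNATCHED, SNATCHED_BEST, ARCHIVED = 4, 2, 12, 6  # stand-ins

    def want_proper(old_status, old_quality, proper_quality):
        return (old_status in (DOWNLOADED, SNATCHED, SNATCHED_BEST, ARCHIVED)
                and proper_quality == old_quality)

    print(want_proper(SNATCHED_BEST, 'HD1080p', 'HD1080p'))  # True; False before this change
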

sickbeard/search.py

@@ -390,7 +390,7 @@ def wanted_episodes(show, from_date, make_dict=False, unaired=False):
                           if (common.Quality.UNKNOWN != i and cur_quality < i)]
 
         ep_obj.eps_aired_in_season = ep_count.get(helpers.tryInt(result['season']), 0)
         ep_obj.eps_aired_in_scene_season = ep_count_scene.get(
-            helpers.tryInt(result['scene_season']), 0) if result['scene_season'] else None
+            helpers.tryInt(result['scene_season']), 0) if result['scene_season'] else ep_obj.eps_aired_in_season
 
         if make_dict:
             wanted.setdefault(ep_obj.scene_season if ep_obj.show.is_scene else ep_obj.season, []).append(ep_obj)
         else:
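
Note: this one-line change is the fix the branch (feature/FixAiredInSeason) is named for. When a show has no scene season recorded, eps_aired_in_scene_season now falls back to the regular season's aired count instead of None, so downstream comparisons never hit a missing value. A self-contained sketch, with tryInt standing in for sickbeard.helpers.tryInt:

    def tryInt(value, default=0):
        try:
            return int(value)
        except (TypeError, ValueError):
            return default

    ep_count = {1: 10}    # aired episodes per regular season
    ep_count_scene = {}   # aired episodes per scene season
    season, scene_season = 1, None

    eps_aired_in_season = ep_count.get(tryInt(season), 0)
    eps_aired_in_scene_season = (ep_count_scene.get(tryInt(scene_season), 0)
                                 if scene_season else eps_aired_in_season)
    print(eps_aired_in_scene_season)  # 10; was None before this change
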

sickbeard/search_queue.py

@@ -265,7 +265,7 @@ class RecentSearchQueueItem(generic_queue.QueueItem):
             cur_time = datetime.datetime.now(network_timezones.sb_timezone)
 
             my_db = db.DBConnection()
-            sql_results = my_db.select('SELECT * FROM tv_episodes WHERE status = ? AND season > 0 AND airdate <= ?',
+            sql_results = my_db.select('SELECT * FROM tv_episodes WHERE status = ? AND season > 0 AND airdate <= ? AND airdate > 1',
                                        [common.UNAIRED, cur_date])
             sql_l = []

sickbeard/webserve.py

@@ -43,7 +43,7 @@ from sickbeard import config, sab, nzbget, clients, history, notifiers, processT
 from sickbeard import encodingKludge as ek
 from sickbeard.providers import newznab, rsstorrent
 from sickbeard.common import Quality, Overview, statusStrings, qualityPresetStrings
-from sickbeard.common import SNATCHED, UNAIRED, IGNORED, ARCHIVED, WANTED, FAILED, SKIPPED
+from sickbeard.common import SNATCHED, UNAIRED, IGNORED, ARCHIVED, WANTED, FAILED, SKIPPED, DOWNLOADED, SNATCHED_BEST, SNATCHED_PROPER
 from sickbeard.common import SD, HD720p, HD1080p
 from sickbeard.exceptions import ex
 from sickbeard.helpers import remove_article, starify
@@ -483,7 +483,8 @@ class MainHandler(WebHandler):
         # make a dict out of the sql results
         sql_results = [dict(row) for row in sql_results
-                       if Quality.splitCompositeStatus(helpers.tryInt(row['status']))[0] not in qualities]
+                       if Quality.splitCompositeStatus(helpers.tryInt(row['status']))[0] not in
+                       [DOWNLOADED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, ARCHIVED, IGNORED]]
 
         # multi dimension sort
         sorts = {
@@ -1877,7 +1878,7 @@ class Home(MainHandler):
                     continue
 
                 if int(
-                        status) in Quality.DOWNLOADED and epObj.status not in Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.DOWNLOADED + [
+                        status) in Quality.DOWNLOADED and epObj.status not in Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST + Quality.DOWNLOADED + [
                     IGNORED, SKIPPED] and not ek.ek(os.path.isfile, epObj.location):
                     logger.log(
                         u'Refusing to change status of ' + curEp + " to DOWNLOADED because it's not SNATCHED/DOWNLOADED",
@@ -1885,7 +1886,7 @@ class Home(MainHandler):
                     continue
 
                 if int(
-                        status) == FAILED and epObj.status not in Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.DOWNLOADED:
+                        status) == FAILED and epObj.status not in Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST + Quality.DOWNLOADED:
                     logger.log(
                         u'Refusing to change status of ' + curEp + " to FAILED because it's not SNATCHED/DOWNLOADED",
                         logger.ERROR)